+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.C9chXT6OHK --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[0 ymakes processing] [6924/6924 modules configured]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [6930/6930 modules configured]
[0 ymakes processing] [6930/6930 modules configured] [3516/3516 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 0.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a
| 1.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a
| 1.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb
| 2.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a
| 1.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd
| 2.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/ydb_cli/commands/ydb_workload.cpp
| 3.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
| 3.4%| PREPARE $(VCS)
| 3.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load
| 3.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch
| 3.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut
| 3.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool
| 4.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/topic_operations_scenario.cpp
| 5.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_yql.cpp
| 5.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_admin.cpp
| 7.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_auth.cpp
| 8.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_monitoring.cpp
| 8.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_export.cpp
| 9.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_scripting.cpp
| 9.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_node_config.cpp
| 9.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_operation.cpp
| 9.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_scheme.cpp
|10.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_workload_import.cpp
|10.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_import.cpp
|10.9%| [CC] {BAZEL_DOWNLOAD}
$(S)/ydb/public/lib/ydb_cli/commands/ydb_service_topic.cpp |11.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_sql.cpp |11.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_storage_config.cpp |11.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_root_common.cpp |11.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_service_table.cpp |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |12.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/benchmark_utils.cpp |12.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_tools.cpp |12.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_latency.h_serialized.cpp |12.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_profile.cpp |13.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/topic_write_scenario.cpp |13.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_ping.h_serialized.cpp |14.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_dynamic_config.cpp |14.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_benchmark.cpp |14.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_ping.cpp |14.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_latency.cpp |14.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_benchmark.h_serialized.cpp |14.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_debug.cpp |14.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_cluster.cpp |14.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/topic_read_scenario.cpp |15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |15.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |15.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/query_workload.cpp |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |15.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |15.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |15.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |15.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |16.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/commands/topic_readwrite_scenario.cpp |16.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |17.6%| PREPARE $(CLANG_FORMAT-1286082657) |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |20.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |20.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |21.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |21.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |21.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |21.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |21.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |22.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |23.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |23.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |24.2%| PREPARE $(YMAKE_PYTHON3-4256832079) |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |24.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/ut/ydb_cli-commands-interactive-highlight-ut |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |24.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |25.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |25.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |25.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |27.2%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |28.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |29.6%| PREPARE $(FLAKE8_PY3-715603131) |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |30.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |32.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |32.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |33.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |34.0%| PREPARE $(LLD_ROOT-3808007503) |34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |34.4%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |34.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |34.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |36.1%| PREPARE $(PYTHON) |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |37.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |38.4%| COMPACTING CACHE 112.0KiB |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |38.6%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |38.9%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |39.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |40.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |42.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |42.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |44.6%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/commands/topic_workload/ut/ydb-public-lib-ydb_cli-commands-topic_workload-ut |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |45.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |45.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/rpc_proxy/config.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |50.8%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/config/ydb-tests-functional-config |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |51.3%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |51.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |51.3%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |51.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |51.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |52.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/openssl/method/libcpp-openssl-method.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |52.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |53.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |53.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |53.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |53.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |53.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |53.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a 
|53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |53.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |54.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |54.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |54.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a 
|55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |55.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |55.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |55.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |56.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |56.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a 
|56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |56.7%| PREPARE $(WITH_JDK17-sbr:7832760150) |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |56.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |57.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |57.3%| PREPARE $(WITH_JDK-sbr:7832760150) |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |57.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |57.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |57.6%| PREPARE $(JDK_DEFAULT-472926544) |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |57.7%| PREPARE $(CLANG-1922233694) |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |58.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |58.1%| PREPARE $(JDK17-472926544) |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |58.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/security/libydb-library-security.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |58.7%| PREPARE $(GDB) |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |58.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |59.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |59.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |59.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |60.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |60.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |60.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |60.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |60.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |60.8%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |61.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |61.0%| PREPARE $(CLANG16-1380963495) |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |61.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |61.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |61.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |61.5%| PREPARE $(CLANG-874354456) |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |61.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |61.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |61.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |61.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |61.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |62.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o |62.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |62.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |62.3%| PREPARE $(CLANG18-1866954364) |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |62.3%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |62.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |62.5%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |63.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |63.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |63.6%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |63.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |64.0%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler |64.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |64.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |65.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |65.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |65.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a >> YqlHighlightTests::Blank [GOOD] >> YqlHighlightTests::Invalid [GOOD] >> YqlHighlightTests::Keyword [GOOD] >> YqlHighlightTests::Operation [GOOD] >> YqlHighlightTests::FunctionIdentifier [GOOD] >> YqlHighlightTests::TypeIdentifier [GOOD] >> YqlHighlightTests::VariableIdentifier [GOOD] >> YqlHighlightTests::QuotedIdentifier [GOOD] >> YqlHighlightTests::String [GOOD] >> YqlHighlightTests::MultilineString [GOOD] >> YqlHighlightTests::TypedString [GOOD] >> YqlHighlightTests::Number [GOOD] >> YqlHighlightTests::SQL [GOOD] >> YqlHighlightTests::Emoji [GOOD] >> YqlHighlightTests::Typing [GOOD] >> YqlHighlightTests::Comment [GOOD] >> YqlHighlightTests::Multiline [GOOD] >> YqlHighlightTests::ANSI [GOOD] >> TCommandWorkloadTopicParamsTests::TestRun_StrToBytes_Simple [GOOD] >> TCommandWorkloadTopicParamsTests::TestRun_StrToBytes_Kilo [GOOD] >> TCommandWorkloadTopicParamsTests::TestRun_StrToBytes_Mega [GOOD] >> TCommandWorkloadTopicParamsTests::TestRun_StrToBytes_Giga [GOOD] >> TCommandWorkloadTopicParamsTests::TestRun_StrToBytes_Error [GOOD] >> TTopicWorkloadWriterProducerTests::HandleAckEvent_ShouldSaveStatistics [GOOD] >> TTopicWorkloadWriterProducerTests::Send_ShouldCallWriteMethodOfTheWriteSession [GOOD] >> 
TTopicWorkloadWriterProducerTests::WaitForContinuationToken_ShouldExtractContinuationTokenFromEvent [GOOD] >> TTopicWorkloadWriterProducerTests::WaitForContinuationToken_ShouldThrowExceptionIfEventOfTheWrongType [GOOD] |68.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/commands/interactive/highlight/ut/unittest >> YqlHighlightTests::ANSI [GOOD] |69.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |69.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/commands/topic_workload/ut/unittest >> TTopicWorkloadWriterProducerTests::WaitForContinuationToken_ShouldThrowExceptionIfEventOfTheWrongType [GOOD] |70.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a >> test_copy_table.py::flake8 [GOOD] |71.5%| [TS] {RESULT} ydb/public/lib/ydb_cli/commands/interactive/highlight/ut/unittest >> test_tpcds.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] |72.0%| [TS] {RESULT} ydb/public/lib/ydb_cli/commands/topic_workload/ut/unittest >> test_clickbench.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_dml.py::flake8 [GOOD] |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_dump_restore.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] >> test_mixed.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_distconf.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test_split_merge.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] |74.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |74.5%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |74.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |75.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |75.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |75.2%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 >> test_workload.py::flake8 [GOOD] >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] |75.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |75.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |75.7%| [TS] {RESULT} ydb/tests/sql/lib/flake8 >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> 
test_generate_dynamic_config.py::flake8 [GOOD] |76.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |76.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |76.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] |76.3%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |76.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |76.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |76.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] >> test_async_replication.py::flake8 [GOOD] |76.4%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |76.4%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 |76.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |76.7%| [TS] {RESULT} ydb/tests/sql/flake8 |76.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |76.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/flake8 >> test_init.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |77.6%| [TS] {RESULT} ydb/tests/functional/config/flake8 |77.8%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |78.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] |78.2%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8 |78.3%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 |78.4%| [TS] {RESULT} ydb/tests/olap/flake8 |78.4%| [TS] {RESULT} ydb/tests/olap/common/flake8 |78.5%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |78.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |78.6%| [TS] {RESULT} ydb/tests/sql/large/flake8 |78.9%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |79.0%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/flake8 |79.5%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |80.0%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |80.5%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |80.8%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |81.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> 
tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |81.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |81.8%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |82.3%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |83.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |84.2%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] |87.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |88.6%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/olap_workload |89.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |89.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |89.6%| [TS] {RESULT} ydb/tests/olap/load/flake8 |89.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/simple_queue |89.9%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> ydb-tests-olap-load::import_test [GOOD] |90.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |92.0%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 >> ydb-tests-functional-clickbench::import_test [GOOD] |92.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/import_test >> ydb-tests-olap-load::import_test [GOOD] |92.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |92.8%| [TS] {RESULT} ydb/tests/olap/load/import_test |93.3%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 >> __main__.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |93.7%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test >> generator::import_test [GOOD] |93.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/cfg |93.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/statistics_workload |93.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/nemesis |93.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/flake8 >> __main__.py::flake8 [GOOD] |93.8%| [TS] {RESULT} ydb/tests/stability/tool/flake8 |93.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |93.8%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |93.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD] |93.8%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test |93.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |93.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |93.9%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> 
test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |93.9%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 >> ydb-tests-functional-compatibility::import_test [GOOD] |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |93.9%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-config::import_test [GOOD] >> ydb-tests-functional-tpc-medium::import_test [GOOD] |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] |93.9%| [TS] {RESULT} ydb/tests/functional/config/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] >> ydb-tests-olap-s3_import::import_test [GOOD] >> ydb-tests-functional-ydb_cli::import_test [GOOD] |94.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |94.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] >> ydb-tests-functional-benchmarks_init::import_test [GOOD] >> ydb-tests-stress-mixedpy::import_test [GOOD] >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> ydb-tests-stress-kv-tests::import_test [GOOD] |94.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test |94.0%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test |94.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD] |94.0%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test |94.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |94.1%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD] >> ydb-tests-functional-tpc-large::import_test [GOOD] >> ydb-tests-stress-log-tests::import_test [GOOD] >> ydb-tests-olap-oom::import_test [GOOD] |94.1%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/import_test |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/import_test >> ydb-tests-stress-mixedpy::import_test [GOOD] |94.1%| [TS] {RESULT} ydb/tests/stress/mixedpy/import_test |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD] |94.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |94.2%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test |94.2%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |94.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/import_test >> ydb-tests-olap-oom::import_test [GOOD] |94.2%| [TS] {RESULT} ydb/tests/olap/oom/import_test |94.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] |94.2%| [TS] {RESULT} 
ydb/tests/functional/tpc/large/import_test |94.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |94.3%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test >> ydb-tests-olap::import_test [GOOD] |94.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |94.3%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test |94.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] |94.3%| [TS] {RESULT} ydb/tests/olap/import_test >> ydb-tests-datashard-dml::import_test [GOOD] |94.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-dml::import_test [GOOD] |94.3%| [TS] {RESULT} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD] |94.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/ydb_cli/commands/ydb_workload.cpp |94.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/ydb_cli/commands/ydb_workload.cpp |94.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |94.3%| [AR] {RESULT} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |94.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD] |94.4%| [TS] {RESULT} ydb/tests/datashard/async_replication/import_test |94.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |94.4%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> ydb-tests-sql::import_test [GOOD] >> ydb-tests-sql-large::import_test [GOOD] >> ydb-tests-datashard-copy_table::import_test [GOOD] |94.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |94.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |94.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] |94.5%| [TS] {RESULT} ydb/tests/sql/import_test |94.5%| [TS] {RESULT} ydb/tests/sql/large/import_test |94.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-copy_table::import_test [GOOD] |94.5%| [TS] {RESULT} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-s3::import_test [GOOD] >> ydb-tests-datashard-dump_restore::import_test [GOOD] |94.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-s3::import_test [GOOD] |94.6%| [TS] {RESULT} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD] |94.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD] |94.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |94.6%| [TS] {RESULT} ydb/tests/datashard/split_merge/import_test |94.6%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test |94.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/tpch/tpch |94.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch 
|94.6%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch >> ydb-tests-olap-scenario::import_test [GOOD] |94.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |94.7%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |94.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |94.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |94.7%| [TS] {RESULT} ydb/tests/olap/scenario/import_test |94.7%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/ydb_cli |94.7%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestTpcdsInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_s3 >> test_init.py::TestTpchInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_column >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row >> test_generator.py::TestTpchGenerator::test_s1_state >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] >> test_generator.py::TestTpchGenerator::test_s1 >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s100_column >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_generator.py::TestTpcdsGenerator::test_s1 >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> test_init.py::TestTpchInit::test_s1_column [GOOD] |94.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |94.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |94.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s100_column [GOOD] >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s100_column [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> data_correctness.py::TestDataCorrectness::test |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> 
test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup [GOOD] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] |95.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |95.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |95.1%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |95.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |95.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |95.1%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> 
test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |95.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |95.2%| [PY] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |95.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |95.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |95.2%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] |95.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/tool |95.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |95.2%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> 
test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct >> 
test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> TTopicReaderTests::TestRun_ReadOneMessage >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 
[GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> YdbTableBulkUpsertOlap::UpsertCsvBug >> YdbTableBulkUpsert::Nulls >> TGRpcNewCoordinationClient::CreateDropDescribe >> YdbMonitoring::SelfCheck |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list >> test_ydb_table.py::TestExecuteQueryWithParams::test_list [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters >> YdbImport::EmptyData |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters [GOOD] >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> YdbMonitoring::SelfCheck [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> YdbLogStore::LogStore >> TGRpcAuthentication::ValidCredentials ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0001ce/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0001ce/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 21013 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV >> YdbImport::EmptyData [GOOD] >> YdbImport::ImportFromS3ToExistingTable >> TGRpcClientLowTest::GrpcRequestProxy >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> 
TGRpcLdapAuthentication::LdapAuthWithInvalidLogin >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase >> TTableProfileTests::UseDefaultProfile >> TGRpcAuthentication::ValidCredentials [GOOD] >> TGRpcAuthentication::NoConnectRights >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> YdbImport::ImportFromS3ToExistingTable [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> TGRpcAuthentication::NoConnectRights [GOOD] >> TGRpcAuthentication::NoDescribeRights >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::UseTableProfilePreset >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> YdbOlapStore::LogLast50ByResource >> TGRpcAuthentication::NoDescribeRights [GOOD] >> TGRpcClientLowTest::BiStreamPing >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-05-03T08:43:42.736012Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137419236830589:2244];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.736081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000187/r3tmp/tmpN8YjoU/pdisk_1.dat 2025-05-03T08:43:42.793943Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27683, node 1 2025-05-03T08:43:42.829256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:42.829266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:42.829268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:42.829305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:42.837724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.837751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:42.839398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:42.849195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:42.855909Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-03T08:43:42.868061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.926882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.939252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.941403Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-03T08:43:42.942516Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-05-03T08:43:43.614246Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137421284319637:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.614290Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000187/r3tmp/tmpj8DudS/pdisk_1.dat 2025-05-03T08:43:43.632802Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7429, node 4 2025-05-03T08:43:43.653536Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:43.653550Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:43.653552Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:43.653596Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:43.714812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.714843Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.716433Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:43.718003Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:43.733906Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.793602Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.805704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.812417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.481789Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137428294182382:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:44.481803Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000187/r3tmp/tmphA4Vbz/pdisk_1.dat 2025-05-03T08:43:44.517461Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22533, node 7 2025-05-03T08:43:44.544314Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.544331Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.544333Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.544381Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9111 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.582157Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.582191Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.583864Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:44.608322Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:45.479180Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137430793208267:2145];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.479297Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000187/r3tmp/tmpk3wZw6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6510, node 10 2025-05-03T08:43:45.561474Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:45.561768Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.561770Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.561772Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.561819Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22643 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:45.581280Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.581326Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:45.585380Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:45.611857Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:45.625442Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:46.310908Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137434915496818:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.311014Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000187/r3tmp/tmphbBRd0/pdisk_1.dat 2025-05-03T08:43:46.335491Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12385, node 13 2025-05-03T08:43:46.375861Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:46.375878Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:46.375880Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:46.375931Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:46.411987Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:46.412018Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:46.414737Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:46.416176Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:46.429278Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TYqlDecimalTests::NegativeValues [GOOD] >> TYqlDecimalTests::DecimalKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-05-03T08:43:42.743011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137419754126208:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.743069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000176/r3tmp/tmprwkxmX/pdisk_1.dat 2025-05-03T08:43:42.843941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.843965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.845167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:42.855605Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1699, node 1 2025-05-03T08:43:42.869795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:42.869809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:42.869811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:42.869859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27906 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:42.913765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:43.603695Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137422494481025:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.603719Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000176/r3tmp/tmpSPYxK8/pdisk_1.dat 2025-05-03T08:43:43.635196Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6884, node 4 2025-05-03T08:43:43.650598Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:43.650615Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:43.650618Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:43.650652Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:43.704033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.704065Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.705559Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:43.718933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:44.513343Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137428005118227:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:44.513389Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000176/r3tmp/tmpCwE3Aj/pdisk_1.dat 2025-05-03T08:43:44.532933Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7556, node 7 2025-05-03T08:43:44.555134Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.555151Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.555153Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.555216Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.613291Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.613320Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.617106Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:44.617259Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000176/r3tmp/tmp0Mmx4d/pdisk_1.dat 2025-05-03T08:43:45.544439Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:43:45.553411Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15565, node 10 2025-05-03T08:43:45.575682Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.575692Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.575694Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.575739Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:45.637292Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.637323Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.638356Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:45.638531Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:46.424336Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137434992746640:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.424362Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000176/r3tmp/tmpLbY03j/pdisk_1.dat 2025-05-03T08:43:46.463147Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25288, node 13 2025-05-03T08:43:46.489187Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:46.489204Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:46.489206Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:46.489252Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:46.520595Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:46.520626Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:46.522816Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:46.543693Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::ChangeAcl >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. 
Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::AlterTableAddIndex >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TYqlDecimalTests::DecimalKey [GOOD] >> YdbYqlClient::TestDecimal1 >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup [GOOD] >> YdbOlapStore::LogLast50 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2025-05-03T08:43:44.375563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137426875790090:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:44.375609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013f/r3tmp/tmptlSQOL/pdisk_1.dat 2025-05-03T08:43:44.433832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19807, node 1 2025-05-03T08:43:44.456909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.456925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.456927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.456971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13290 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:44.476280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.476310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.478585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.512417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-05-03T08:43:44.674844Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:44.676510Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:45.309500Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137431529983131:2261];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.309541Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013f/r3tmp/tmpdouFh6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22599, node 4 2025-05-03T08:43:45.390370Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:45.397209Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.397224Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.397225Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.397264Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22944 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:45.409971Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.409998Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:45.420359Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:45.465076Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:45.483678Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:45.485696Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:46.117861Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137433120923340:2150];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.117938Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013f/r3tmp/tmppop2DY/pdisk_1.dat 2025-05-03T08:43:46.151850Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8813, node 7 2025-05-03T08:43:46.195379Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:46.195394Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:46.195395Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:46.195451Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24671 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:46.218091Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:46.218124Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:46.219577Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:46.227106Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:46.453368Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-05-03T08:43:46.455322Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-05-03T08:43:46.465273Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:46.469099Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-05-03T08:43:46.471594Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:46.971351Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137436373206019:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.971396Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013f/r3tmp/tmpxwObhk/pdisk_1.dat 2025-05-03T08:43:47.010879Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9393, node 10 2025-05-03T08:43:47.027432Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.027444Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-05-03T08:43:47.027446Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.027500Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:47.071616Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.071655Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.073236Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:47.088170Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-05-03T08:43:47.298739Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:47.300808Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:47.302684Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:47.305580Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:47.307954Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-05-03T08:43:47.860594Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137440656831430:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.860622Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013f/r3tmp/tmpwlRUMs/pdisk_1.dat 2025-05-03T08:43:47.880085Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19758, node 13 2025-05-03T08:43:47.904737Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.904750Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.904752Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.904801Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:47.960929Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.960959Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.962536Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:47.969121Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:47.972892Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:3760 2025-05-03T08:43:48.020866Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::BiStreamCancelled [GOOD] Test command err: 2025-05-03T08:43:44.008854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137425617354231:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:44.008870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000140/r3tmp/tmpUcZkCN/pdisk_1.dat 2025-05-03T08:43:44.063766Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11643, node 1 2025-05-03T08:43:44.082894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.082904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.082905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.082936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.106320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:44.109092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.109113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.110525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6722 TClient is connected to server localhost:6722 2025-05-03T08:43:44.175846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.341408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137425617355209:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:44.341444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137425617355190:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:44.341487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:44.342107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:44.346373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137425617355219:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-03T08:43:44.427434Z node 1 :TX_PROXY ERROR: Actor# [1:7500137425617355290:2677] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } TClient is connected to server localhost:6722 TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746261824153 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1746261824391 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED) 2025-05-03T08:43:45.127368Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137429463939724:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.127407Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000140/r3tmp/tmpdOcl6B/pdisk_1.dat 2025-05-03T08:43:45.164022Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7235, node 4 2025-05-03T08:43:45.199251Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.199263Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.199265Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.199310Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:45.228419Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.228454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.230044Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:45.249682Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:20368 2025-05-03T08:43:46.112561Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137435576940331:2144];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.112671Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000140/r3tmp/tmpSHL5S9/pdisk_1.dat 2025-05-03T08:43:46.146264Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9292, node 7 2025-05-03T08:43:46.178055Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:46.178069Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:46.178071Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:46.178118Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:46.212519Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:46.212553Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:46.214157Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:46.228779Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:61922 TClient is connected to server localhost:61922 2025-05-03T08:43:46.276382Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:46.461985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137435576941234:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:46.462009Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137435576941215:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:46.462095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:46.462960Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:46.468262Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137435576941244:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-03T08:43:46.538235Z node 7 :TX_PROXY ERROR: Actor# [7:7500137435576941315:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } TClient is connected to server localhost:61922 TClient::Ls request: Root 2025-05-03T08:43:46.626624Z node 7 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied" 2025-05-03T08:43:47.169592Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137439949250960:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.169619Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000140/r3tmp/tmpmC9oAe/pdisk_1.dat 2025-05-03T08:43:47.186479Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27956, node 10 2025-05-03T08:43:47.210325Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.210339Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.210340Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.210381Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:47.269754Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.269791Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.273055Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:47.276692Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:47.289317Z node 10 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator 2025-05-03T08:43:47.289363Z node 10 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0 2025-05-03T08:43:47.895166Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137440489238534:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.895187Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000140/r3tmp/tmpyIHtNt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15690, node 13 2025-05-03T08:43:47.937592Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:47.941467Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.941478Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.941479Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.941524Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:47.995806Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.995839Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.997360Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.008371Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-05-03T08:43:43.191048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137422158009342:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.191074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000153/r3tmp/tmp7eumnL/pdisk_1.dat 2025-05-03T08:43:43.248046Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20634, node 1 2025-05-03T08:43:43.283271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:43.283296Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:43.283298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:43.283340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:43.291613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.291649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.293296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:43.326125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:43.537619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.326380Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137427471287998:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:44.326403Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000153/r3tmp/tmpneGaZI/pdisk_1.dat 2025-05-03T08:43:44.342396Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29622, node 4 2025-05-03T08:43:44.366772Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.366786Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.366790Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.366832Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.426691Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.426721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.428162Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:44.430955Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:44.659454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.440102Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137432619577244:2147];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.440901Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000153/r3tmp/tmpIGOUfm/pdisk_1.dat 2025-05-03T08:43:45.464070Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12071, node 7 2025-05-03T08:43:45.489052Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.489063Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.489064Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.489122Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:45.547093Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.547118Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.547879Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.548653Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:43:45.738775Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.802408Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137432619578251:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:45.802443Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:45.809085Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137432619578262:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:45.810067Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:45.814646Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137432619578265:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:45.901726Z node 7 :TX_PROXY ERROR: Actor# [7:7500137432619578336:2766] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:45.956576Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1p89ej15n2zbs6rnpteb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjUzZDQ2YjgtZmUyYzkwZGUtODcyN2MzYTItNjRlMDJiYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:45.978930Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1pdb0kv7hxb2d3ayskyf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjUzZDQ2YjgtZmUyYzkwZGUtODcyN2MzYTItNjRlMDJiYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:45.996656Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1pdwd4nhcn4dw5cf252x, Database: , DatabaseId: /Root, SessionId: ... propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:47.066676Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137437749663007:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:47.066705Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137437749662996:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:47.066722Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:47.067397Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:47.074118Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137437749663010:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:47.155524Z node 10 :TX_PROXY ERROR: Actor# [10:7500137437749663085:2762] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:47.167008Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1qftd5yz42fbqg1sen6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3ZDQxODAtODYyNGIzNzYtOGFlZGI2ZTQtMTFmNmVkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:47.183069Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1qk262q9q2kf5fc9sq3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3ZDQxODAtODYyNGIzNzYtOGFlZGI2ZTQtMTFmNmVkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:47.202355Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1qkg7ny0ehnyw9tj37m2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3ZDQxODAtODYyNGIzNzYtOGFlZGI2ZTQtMTFmNmVkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:47.217656Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1qm513na6zbgb3q7m96g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3ZDQxODAtODYyNGIzNzYtOGFlZGI2ZTQtMTFmNmVkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:47.236351Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jtan1qmndfhv6y8gpar7bfhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2Q3ZDQxODAtODYyNGIzNzYtOGFlZGI2ZTQtMTFmNmVkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:47.818010Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137438683110441:2141];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.818033Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000153/r3tmp/tmpbvPrQ5/pdisk_1.dat 2025-05-03T08:43:47.864944Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15908, node 13 2025-05-03T08:43:47.887501Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.887514Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.887516Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.887557Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22556 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:47.918023Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.918054Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.945331Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:47.948857Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:48.188181Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:48.205105Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137442978078741:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.205111Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137442978078751:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.205138Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.205715Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:48.214878Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137442978078755:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:48.266870Z node 13 :TX_PROXY ERROR: Actor# [13:7500137442978078827:2761] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:48.278381Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1rkc8ktqdn5x36c4sv5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.294607Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1rnsey0v4mxxvp8c2sze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.308907Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1rp8bwf0kp93s7d6h0k8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.323084Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1rppb4r3n31j2kyjs71q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.338378Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jtan1rq47rfp9sddc0jptka3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.355084Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jtan1rqnfq4d9jjjrw270djn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.370413Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jtan1rr4brm2d1v3njm6d2rx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.396444Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jtan1rrk9n42r6fqa4e7e5a8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.420846Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jtan1rsh8hc7c0qw8wx4mw4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-05-03T08:43:48.447944Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jtan1rt69mz9kszgm9qdgr4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU0NzQ1ZS1iMjNhNjc1MS1hYzA1YzM5Yy01YjM1MzVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndex [GOOD] Test command err: 2025-05-03T08:43:42.754976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137418627525757:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.755013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000170/r3tmp/tmp1IH8bi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30558, node 1 2025-05-03T08:43:42.854928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.854959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.857522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:42.863799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:42.863810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:42.863812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:42.863854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:42.864386Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:42.893710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:43.134261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.206450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137422922494024:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.206455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137422922494032:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.206471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.207188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:43.211184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137422922494038:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-03T08:43:43.266377Z node 1 :TX_PROXY ERROR: Actor# [1:7500137422922494115:2787] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:43.331799Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7500137422922494158:2341] TxId: 281474976710662. Ctx: { TraceId: 01jtan1kq531n32v367g2np03k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBkNThhZWQtOTFmN2U2NWItYzg1ODEzYTQtY2Q2YzdlYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2025-05-03T08:43:43.331886Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jtan1kq531n32v367g2np03k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBkNThhZWQtOTFmN2U2NWItYzg1ODEzYTQtY2Q2YzdlYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:43.334628Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261823376, txId: 281474976710661] shutting down 2025-05-03T08:43:43.344811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.346473Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found BAD_REQUEST 2025-05-03T08:43:43.416047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.417575Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found SUCCESS 2025-05-03T08:43:43.486355Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000170/r3tmp/tmpt6sYZa/pdisk_1.dat 2025-05-03T08:43:44.048695Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:43:44.049661Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24340, node 4 2025-05-03T08:43:44.068795Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.068807Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.068809Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.068856Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.135664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.135691Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.137255Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:44.138208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17050 2025-05-03T08:43:44.158556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
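The `Scheduled retry for error` records above show the KQP workload service backing off and retrying after a transient scheme error (the `default` resource pool is reported as missing while a concurrent operation is still creating it). As a rough illustration of that retry pattern only, and not the actual YDB actor code, a jittered exponential backoff loop could look like the sketch below; the exception type, delays, and messages are assumptions made for this example.

```python
import random
import time


class TransientSchemeError(Exception):
    """Stand-in for a retryable error such as a missing resource pool (hypothetical)."""


def retry_with_backoff(operation, attempts=5, base_delay=0.1, max_delay=5.0):
    """Run `operation`, retrying transient failures with jittered exponential backoff."""
    for attempt in range(attempts):
        try:
            return operation()
        except TransientSchemeError as err:
            if attempt == attempts - 1:
                raise  # out of retries: surface the last error to the caller
            delay = min(max_delay, base_delay * (2 ** attempt)) * random.uniform(0.5, 1.5)
            print(f"Scheduled retry for error: {err} (sleeping {delay:.2f}s)")
            time.sleep(delay)


# Toy usage: the "pool" becomes available after the first failed attempt.
state = {"pool_ready": False}


def fetch_default_pool():
    if not state["pool_ready"]:
        state["pool_ready"] = True  # pretend the concurrent CREATE finishes in the meantime
        raise TransientSchemeError("Resource pool default not found")
    return "default"


print(retry_with_backoff(fetch_default_pool))
```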
2025-05-03T08:43:44.169884Z node 4 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-03T08:43:44.173375Z node 4 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-03T08:43:44.173428Z node 4 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-05-03T08:43:44.174130Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:44.174191Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:44.174244Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:44.174270Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:44.174306Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:44.174333Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:44.174350Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[4:7500137425900053795:2321];tablet_id=720751862240378 ... 
id=72075186224037888;self_id=[10:7500137439257645773:2321];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1746261827730:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":22},{"from":18},{"from":16},{"from":24},{"from":10},{"from":12},{"from":8},{"from":6},{"from":20},{"from":14},{"from":4},{"from":2}]},{"owner_id":18,"inputs":[{"from":25}]},{"owner_id":2,"inputs":[{"from":25}]},{"owner_id":20,"inputs":[{"from":25}]},{"owner_id":4,"inputs":[{"from":25}]},{"owner_id":22,"inputs":[{"from":25}]},{"owner_id":6,"inputs":[{"from":25}]},{"owner_id":24,"inputs":[{"from":25}]},{"owner_id":25,"inputs":[{"from":26}]},{"owner_id":8,"inputs":[{"from":25}]},{"owner_id":26,"inputs":[]},{"owner_id":10,"inputs":[{"from":25}]},{"owner_id":12,"inputs":[{"from":25}]},{"owner_id":14,"inputs":[{"from":25}]},{"owner_id":16,"inputs":[{"from":25}]}],"nodes":{"8":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":29,"id":8},"2":{"p":{"i":"12","p":{"address":{"name":"dbl","id":12}},"o":"12","t":"AssembleOriginalData"},"w":29,"id":2},"18":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":29,"id":18},"0":{"p":{"i":"12,11,8,7,5,6,10,3,2,9,1,4","t":"Projection"},"w":348,"id":0},"4":{"p":{"i":"11","p":{"address":{"name":"flt","id":11}},"o":"11","t":"AssembleOriginalData"},"w":29,"id":4},"20":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":29,"id":20},"16":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":29,"id":16},"24":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":29,"id":24},"14":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":29,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":29,"id":10},"25":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10},{"name":"flt","id":11},{"name":"dbl","id":12}]},"o":"1,2,3,4,5,6,7,8,9,10,11,12","t":"FetchOriginalData"},"w":24,"id":25},"6":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":29,"id":6},"22":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":29,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":29,"id":12},"26":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10},{"name":"flt","id":11},{"name":"dbl","id":12}]},"o":"0","t":"ReserveMemory"},"w":0,"id":26}}}; 2025-05-03T08:43:47.800316Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-05-03T08:43:47.800331Z node 10 :TX_COLUMNSHARD DEBUG: 
external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-05-03T08:43:47.800335Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-05-03T08:43:47.800342Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-05-03T08:43:47.800648Z node 10 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-05-03T08:43:47.800650Z node 10 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-05-03T08:43:47.800679Z node 10 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-05-03T08:43:47.800682Z node 10 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-05-03T08:43:47.801975Z node 10 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-05-03T08:43:47.802545Z node 10 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2025-05-03T08:43:47.806299Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261827730, txId: 18446744073709551615] shutting down 2025-05-03T08:43:48.412993Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137444632904753:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.413020Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000170/r3tmp/tmpmXFCzv/pdisk_1.dat 2025-05-03T08:43:48.430297Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6952, node 13 2025-05-03T08:43:48.450989Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.451002Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.451005Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.451053Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28325 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:48.513756Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.513789Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.515390Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.519610Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:48.725465Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137444632905698:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.725488Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.730049Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:48.791383Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137444632905861:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.791404Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.791412Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137444632905866:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:48.792098Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:48.803205Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137444632905868:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:48.885457Z node 13 :TX_PROXY ERROR: Actor# [13:7500137444632905941:2766] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:48.895259Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1s5q0g728qdmymddgtxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NWE0ZmI1NGYtOWQyNTFmZTgtZGU3M2YzOWQtNTQ2OTQ3ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:48.901112Z node 13 :TX_PROXY WARN: [AlterTableAddIndex [13:7500137444632905989:2366] TxId# 281474976715663] Access check failed 2025-05-03T08:43:48.906117Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-03T08:43:48.920181Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-03T08:43:48.937729Z node 13 :TX_PROXY ERROR: [AlterTableAddIndex [13:7500137444632906361:2378] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-05-03T08:43:48.954433Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogNonExistingRequest >> YdbYqlClient::CreateTableWithPartitionAtKeys >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::WrongTableProfile >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> YdbYqlClient::TestReadTableMultiShardWholeTable >> TGRpcYdbTest::DropTableBadRequest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> TGRpcNewCoordinationClient::SessionMethods >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::DecimalPK >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableWithIndex >> 
TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TTableProfileTests::DescribeTableOptions >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogGrepNonExisting >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> TGRpcAuthentication::InvalidPassword >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000182/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000182/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 20711 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> YdbYqlClient::TestDecimalFullStack [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> YdbYqlClient::TestDescribeDirectory >> TGRpcYdbTest::BeginTxRequestError >> YdbTableBulkUpsert::DecimalPK [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TGRpcYdbTest::CreateYqlSession >> YdbLogStore::LogStore [GOOD] >> TYqlDateTimeTests::DateKey [GOOD] >> YdbLogStore::LogStoreNegative >> TTableProfileTests::DescribeTableOptions [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DecimalPK [GOOD] Test command err: 
2025-05-03T08:43:42.744983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137419822743288:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.745017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00017b/r3tmp/tmpMI6peU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18288, node 1 2025-05-03T08:43:42.846400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:42.847350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.847382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.848935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:42.850128Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:42.850140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:42.850141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:42.850184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:42.906749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:43.058287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.129223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137424117711680:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.129253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.129328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137424117711692:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.130203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:43.134505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137424117711694:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:43.201364Z node 1 :TX_PROXY ERROR: Actor# [1:7500137424117711765:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:43.296192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1kmregw2931spycynpda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg3NmYtNmFjMmViY2YtYTI4NGUyMjItNWQ0Y2Y5N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-05-03T08:43:43.343868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1kt6cfsqbkcjns1b3t6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg3NmYtNmFjMmViY2YtYTI4NGUyMjItNWQ0Y2Y5N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.355165Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-03T08:43:43.355718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.413568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jtan1kw82f3dee7jk4z3hcp8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBlMzE2NDktMjMzNWQzNDEtMmMyYjYyMjUtZjkzYTE2NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-05-03T08:43:43.450962Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jtan1kxqcbnkawy4mxz8hy17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBlMzE2NDktMjMzNWQzNDEtMmMyYjYyMjUtZjkzYTE2NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.462157Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-03T08:43:43.462504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.512421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jtan1kzk7w46q99s5kbhbtaj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEyNjUxMjEtNzM0NGVmZWEtZTI1MzVhZWEtOTQ2M2M0MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-05-03T08:43:43.551922Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jtan1m0tfsvgkhdh9t0qg85p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEyNjUxMjEtNzM0NGVmZWEtZTI1MzVhZWEtOTQ2M2M0MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.564398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.566071Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found SUCCESS SUCCESS count returned 0 rows 2025-05-03T08:43:43.617144Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jtan1m2rev2h3b7nwaz0vnfk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYyOGRhMDgtMzY0N2JmYWItZWVjYWRhZmEtY2YyMzU5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:43.673483Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jtan1m48brrmy27w1c5ebngd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYyOGRhMDgtMzY0N2JmYWItZWVjYWRhZmEtY2YyMzU5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.684415Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-03T08:43:43.685057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.743667Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jtan1m6kevzf8vd6qtv7smsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFjN2YwNGQtYTNjYzI1YS00ZjFhNGJhZS03YzRjYjNjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-05-03T08:43:43.790121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jtan1m81a4mdy92qcrg4rd9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFjN2YwNGQtYTNjYzI1YS00ZjFhNGJhZS03YzRjYjNjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.802079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.802212Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found SUCCESS 2025-05-03T08:43:43.859635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jtan1ma7ccwn2sc734vn4jm9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTllZDBkMTUtOTg0MDcxNDEtOTM5YWNhNGMtZGU4Yzc4NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-05-03T08:43:43.905558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jtan1mbn1kq3pzhw55nh4z23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTllZDBkMTUtOTg0MDcxNDEtOTM5YWNhNGMtZGU4Yzc4NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 1 rows 2025-05-03T08:43:43.923580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-05-03T08:43:43.924403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 SUCCESS 2025-05-03T08:43:43.987395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jtan1me2398kcxqwrweb09wm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI0YjIwOGQtOTMxZWRjMDgtMjdhODI1YTgtN2JmMDdkY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count ... e profiles were not loaded TServer::EnableGrpc on GrpcPort 61038, node 7 2025-05-03T08:43:48.543004Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.543019Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.543021Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.543072Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:48.606954Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.606981Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.608459Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.615329Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:48.834075Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Traces' unknown table
: Error: Bulk upsert to table '/Root/Logs' Missing key columns: Timestamp
: Error: Bulk upsert to table '/Root/Logs' Missing key columns: Shard
: Error: Bulk upsert to table '/Root/Logs' Type mismatch, got type Uint64 for column App, but expected Utf8
: Error: Bulk upsert to table '/Root/Logs' Type mismatch, got type Uint64 for column Message, but expected Utf8
: Error: Bulk upsert to table '/Root/Logs' Unknown column: HttpCode 2025-05-03T08:43:49.550410Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137447566865018:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.550741Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00017b/r3tmp/tmpArNdDS/pdisk_1.dat 2025-05-03T08:43:49.572937Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14295, node 10 2025-05-03T08:43:49.596283Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.596297Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.596299Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.596361Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.650956Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.650982Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.652439Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.661464Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.921222Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
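The `Bulk upsert to table ...` rejections above (and the row/cell size rejections just below) spell out the validation applied before rows are accepted: the target table must exist, every key column must be supplied, value types must match the table schema, unknown columns are refused, and the encoded row key and individual cells are bounded (1049600 and 16777216 bytes respectively in this run). The sketch below is a hypothetical client-side pre-check against a hand-written schema description; it is not the YDB SDK API, and treating `Timestamp` and `Shard` as the only key columns of `/Root/Logs` is an assumption based on the two "Missing key columns" messages.

```python
MAX_KEY_BYTES = 1_049_600     # row key threshold reported by the server in this run
MAX_CELL_BYTES = 16_777_216   # single cell threshold reported by the server in this run

# Hand-written description of '/Root/Logs' reconstructed from the error messages:
# Timestamp and Shard are key columns, App and Message are Utf8 (str here).
# Any further detail is an assumption made only for this illustration.
LOGS_TABLE = {
    "key_columns": ("Timestamp", "Shard"),
    "column_types": {"Timestamp": int, "Shard": int, "App": str, "Message": str},
}


def precheck_row(table, row):
    """Return the reasons a bulk upsert of `row` would be rejected (empty list means OK)."""
    problems = []
    for key in table["key_columns"]:
        if key not in row:
            problems.append(f"Missing key columns: {key}")
    for name, value in row.items():
        expected = table["column_types"].get(name)
        if expected is None:
            problems.append(f"Unknown column: {name}")
            continue
        if not isinstance(value, expected):
            problems.append(f"Type mismatch, got {type(value).__name__} for column {name}")
        cell = value.encode() if isinstance(value, str) else value
        if isinstance(cell, (bytes, bytearray)) and len(cell) > MAX_CELL_BYTES:
            problems.append(f"Row cell size in column {name} is larger than {MAX_CELL_BYTES}")
    # Very rough stand-in for the server's encoded key size check.
    key_bytes = sum(len(str(row[k]).encode()) for k in table["key_columns"] if k in row)
    if key_bytes > MAX_KEY_BYTES:
        problems.append(f"Row key size of {key_bytes} bytes is larger than {MAX_KEY_BYTES}")
    return problems


# Rows mirroring the failures in the log: missing Shard key, wrong type for App.
print(precheck_row(LOGS_TABLE, {"Timestamp": 1, "App": 42, "Message": "ok"}))
```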
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100002 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100002 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100000 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row cell size of 17000022 bytes is larger than the allowed threshold 16777216 2025-05-03T08:43:51.426928Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137456437290168:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.426950Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00017b/r3tmp/tmp0TrSLM/pdisk_1.dat 2025-05-03T08:43:51.459424Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10615, node 13 2025-05-03T08:43:51.476544Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.476555Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.476558Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.476612Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.527414Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.527456Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.529019Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.542521Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:51.731964Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.794617Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137456437291262:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.794617Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137456437291273:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.794638Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.795294Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:51.799049Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137456437291276:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:51.898242Z node 13 :TX_PROXY ERROR: Actor# [13:7500137456437291349:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:51.930889Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1w3j7y3rj4z9ectvhtbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWI4ZTgwMzYtODAyZWUxZC0xZjBlYWQzOC0zZWZkZTNlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DateKey [GOOD] Test command err: 2025-05-03T08:43:45.132508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137430023844487:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.132692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013a/r3tmp/tmpcAx8MD/pdisk_1.dat 2025-05-03T08:43:45.236170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.236203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.237651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3538, node 1 2025-05-03T08:43:45.246086Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:45.268017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:45.268032Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:45.268034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:45.268075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:45.318274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:45.323272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:4018 2025-05-03T08:43:45.417819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:45.421888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:45.923181Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500137428748966870:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:45.923204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:45.929399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.929421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.930736Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-03T08:43:45.930986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4018 2025-05-03T08:43:45.955691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4018 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1746261826057 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:43:46.039071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4018 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1746261826092 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-05-03T08:43:46.065261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4018 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1746261826120 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:43:46.091179Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-03T08:43:46.091291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:43:46.253053Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-03T08:43:46.253189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7500137433043934507:2307], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:43:46.308480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7500137433043934507:2307], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:43:46.750122Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137434620594881:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:46.750247Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013a/r3tmp/tmpuecKvo/pdisk_1.dat 2025-05-03T08:43:46.775218Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11258, node 4 2025-05-03T08:43:46.803240Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:46.803268Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:46.803271Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:46.803323Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:46.850666Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) Vola ... olumns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" ... 
(TRUNCATED) 2025-05-03T08:43:49.833198Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-05-03T08:43:49.834045Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:43:50.464940Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137451896230446:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.464967Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013a/r3tmp/tmpjKC339/pdisk_1.dat 2025-05-03T08:43:50.486499Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21550, node 10 2025-05-03T08:43:50.498652Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.498664Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.498665Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.498699Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:50.565280Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.565315Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.566669Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.573087Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:26087 2025-05-03T08:43:50.625499Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.629865Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:51.131347Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7500137456744781787:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.131367Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:51.137378Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.137409Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.139288Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-05-03T08:43:51.139556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26087 2025-05-03T08:43:51.182216Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:43:51.182328Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:43:51.445885Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7500137456744782126:2304], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:43:51.445936Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-03T08:43:51.744445Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137454520927668:2272];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.744469Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00013a/r3tmp/tmpq0h8Zo/pdisk_1.dat 2025-05-03T08:43:51.777367Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27894, node 13 2025-05-03T08:43:51.788469Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.788483Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.788485Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.788529Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.844664Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.844696Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.846253Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.855553Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:52.038995Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.101556Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137458815895853:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.101571Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137458815895842:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.101604Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.102374Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:52.106315Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137458815895856:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:52.202645Z node 13 :TX_PROXY ERROR: Actor# [13:7500137458815895929:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:52.215549Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1wd5demzf7y1ehv9sqtj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzM5MGEyYzUtNTY1MmM2NDktY2QzMzY3ZTktYmZmYTQ1Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.239790Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1wgz039nf17ted5f6jp9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzM5MGEyYzUtNTY1MmM2NDktY2QzMzY3ZTktYmZmYTQ1Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.261667Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1whh54zp2wsh2mt1hqgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzM5MGEyYzUtNTY1MmM2NDktY2QzMzY3ZTktYmZmYTQ1Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.285562Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1wj724bs0zyvd82gdg0h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzM5MGEyYzUtNTY1MmM2NDktY2QzMzY3ZTktYmZmYTQ1Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-05-03T08:43:47.113201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137438277143064:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.113247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000136/r3tmp/tmpr63J0v/pdisk_1.dat 2025-05-03T08:43:47.167252Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28964, node 1 2025-05-03T08:43:47.191584Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.191596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.191598Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.191633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62918 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:47.214573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.214606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-03T08:43:47.216105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:47.243450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. Access denied" } 2025-05-03T08:43:47.972019Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137437233197880:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.972090Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000136/r3tmp/tmpXqaZY0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16574, node 4 2025-05-03T08:43:48.009880Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:48.013088Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.013101Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.013102Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.013151Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:48.072727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.072755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.074365Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.086220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1746269027994181 Nodes { NodeId: 1024 Host: "localhost" Port: 7206 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1746269027994181 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-05-03T08:43:48.888174Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137442686774561:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.888202Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000136/r3tmp/tmpP8PMq8/pdisk_1.dat 2025-05-03T08:43:48.906437Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64138, node 7 2025-05-03T08:43:48.929240Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.929251Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.929253Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.929312Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:62172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:48.988746Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.988783Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.990247Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.995720Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1746269028905040 Nodes { NodeId: 1024 Host: "localhost" Port: 29723 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1746269028905040 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-05-03T08:43:49.691616Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137446869742724:2144];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.691652Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000136/r3tmp/tmp6h1Ksc/pdisk_1.dat 2025-05-03T08:43:49.723197Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1657, node 10 2025-05-03T08:43:49.745332Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.745348Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.745351Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.745405Z node 10 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:27406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.791501Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.791530Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.793067Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.800752Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27406 2025-05-03T08:43:49.831899Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.836082Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:50.342288Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7500137450593966821:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.342584Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:50.344552Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.344586Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.345624Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-05-03T08:43:50.345826Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27406 2025-05-03T08:43:50.378342Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27406 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746261830920 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "Data" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "KeyHash" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-05-03T08:43:50.910841Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:43:50.911121Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:43:51.570653Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137456044321857:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.570670Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000136/r3tmp/tmpaDpSHH/pdisk_1.dat 2025-05-03T08:43:51.602547Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7104, node 13 2025-05-03T08:43:51.614739Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.614751Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.614755Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.614794Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.671044Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.671073Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.672511Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.692845Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18175 2025-05-03T08:43:51.725445Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:51.729473Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:52.231013Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7500137462289890012:2230];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.231052Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:52.232522Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.232543Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.234052Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-05-03T08:43:52.234334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18175 2025-05-03T08:43:52.263360Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-05-03T08:43:52.263491Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> YdbYqlClient::TestReadTableOneBatch >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> YdbYqlClient::TestDescribeDirectory [GOOD] >> TGRpcYdbTest::RemoveNotExistedDirectory >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-05-03T08:43:47.196898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137441152470977:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.197330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpTvrblj/pdisk_1.dat 2025-05-03T08:43:47.264427Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7596, node 1 2025-05-03T08:43:47.290342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.290355Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.290358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.290407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:47.295863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.295909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.297578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1217 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:47.319636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:47.347506Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:46064) has now valid token of root@builtin 2025-05-03T08:43:47.357692Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:47.357707Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:47.357709Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:47.357720Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:48.099989Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137442232258632:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.100019Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpOk2YtN/pdisk_1.dat 2025-05-03T08:43:48.117287Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23921, node 4 2025-05-03T08:43:48.134749Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.134770Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.134772Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.134826Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9757 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:48.200574Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.200607Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.202819Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.203189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:48.231939Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:41132) has now valid token of root@builtin 2025-05-03T08:43:48.268645Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:48.268662Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:48.268665Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:48.268679Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:48.957958Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137442436718189:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.958040Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmp6pNQ39/pdisk_1.dat 2025-05-03T08:43:48.982888Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10367, node 7 2025-05-03T08:43:49.004566Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.004578Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.004579Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.004618Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.058613Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.058649Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.065673Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.071348Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.094403Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57072) has now valid token of root@builtin 2025-05-03T08:43:49.107597Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:49.107615Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:49.107618Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:49.107631Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:49.829991Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137449157383160:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.830016Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpzCIFKJ/pdisk_1.dat 2025-05-03T08:43:49.850410Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63244, node 10 2025-05-03T08:43:49.873858Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.873870Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.873872Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.873927Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7253 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ... te: Disconnected -> Connecting 2025-05-03T08:43:49.932102Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.933655Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.958958Z node 10 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:49006) has now valid token of root@builtin 2025-05-03T08:43:49.968131Z node 10 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:49.968155Z node 10 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:49.968157Z node 10 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:49.968169Z node 10 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:50.719158Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137452816043631:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.719178Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpQaTfQ5/pdisk_1.dat 2025-05-03T08:43:50.740480Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2958, node 13 2025-05-03T08:43:50.776454Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.776468Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.776470Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.776511Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:50.819379Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.819415Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.820898Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.825623Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.829279Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1746269030739359 Nodes { NodeId: 1024 Host: "localhost" Port: 10626 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1746269030739359 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-05-03T08:43:51.547628Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7500137458548512277:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.547709Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpOJb50J/pdisk_1.dat 2025-05-03T08:43:51.581890Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12745, node 16 2025-05-03T08:43:51.602067Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.602080Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.602081Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.602137Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61139 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.647784Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.647837Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.674670Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.674843Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1746269031580040 Nodes { NodeId: 1024 Host: "localhost" Port: 4999 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1746269031580040 } Nodes { NodeId: 16 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 17 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 18 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-05-03T08:43:52.429334Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7500137460419457858:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.429441Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000132/r3tmp/tmpdLrAeF/pdisk_1.dat 2025-05-03T08:43:52.448943Z node 19 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4098, node 19 2025-05-03T08:43:52.470049Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.470064Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.470066Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.470116Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:22477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.529553Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.529589Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.534489Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.534863Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-05-03T08:43:52.601442Z node 19 :TICKET_PARSER ERROR: Ticket 05E726017C656F64C923B20B060A16D3CB110AD9B89A5140AAC8BCA8A8182063: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. 
Client certificate failed verification" } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-05-03T08:43:48.894723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137443958557029:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.894742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012b/r3tmp/tmp2QovVN/pdisk_1.dat 2025-05-03T08:43:48.962878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27454, node 1 2025-05-03T08:43:48.981647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:48.981662Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:48.981664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:48.981716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:48.994926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:48.994950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:48.996440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.006686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:49.032803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:49.684109Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137448478834759:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.684530Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012b/r3tmp/tmpajPeOM/pdisk_1.dat 2025-05-03T08:43:49.706235Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11763, node 4 2025-05-03T08:43:49.729267Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.729284Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.729285Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.729332Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.784495Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.784555Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.786324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.790413Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:49.803733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.619238Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137452907505616:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.619268Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012b/r3tmp/tmpmyXn89/pdisk_1.dat 2025-05-03T08:43:50.654988Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4586, node 7 2025-05-03T08:43:50.686172Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.686184Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.686186Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.686242Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:50.719883Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.719911Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.721444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.728326Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:50.753776Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.770665Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.837672Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.848954Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.861487Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.894387Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.896493Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.899165Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.634672Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137454929567585:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.634701Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012b/r3tmp/tmpJF5Isi/pdisk_1.dat 2025-05-03T08:43:51.650250Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22416, node 10 2025-05-03T08:43:51.680495Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.680510Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.680512Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.680562Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.735162Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.735207Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.736715Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:43:51.737354Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.741361Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:43:51.762350Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.471600Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137460002380300:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.471927Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012b/r3tmp/tmpa3MZE6/pdisk_1.dat 2025-05-03T08:43:52.493887Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6932, node 13 2025-05-03T08:43:52.530057Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.530073Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.530075Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.530121Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18990 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.572332Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.572367Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.573949Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.582504Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:52.811796Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137460002381227:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.811798Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137460002381216:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.811811Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.812602Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:52.816585Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137460002381230:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:52.892472Z node 13 :TX_PROXY ERROR: Actor# [13:7500137460002381301:2642] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:52.892784Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZTU5MmM5OTgtNGI0YjFjMjUtNzk2NTgwZjItN2NjZWM5MTU=, ActorId: [13:7500137460002381198:2329], ActorState: ExecuteState, TraceId: 01jtan1x3b0am97y0pzj2c3mwd, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-05-03T08:43:52.893951Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZTU5MmM5OTgtNGI0YjFjMjUtNzk2NTgwZjItN2NjZWM5MTU=, ActorId: [13:7500137460002381198:2329], ActorState: ExecuteState, TraceId: 01jtan1x5x3s41045ss41g9xrz, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-05-03T08:43:52.894465Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZTU5MmM5OTgtNGI0YjFjMjUtNzk2NTgwZjItN2NjZWM5MTU=, ActorId: [13:7500137460002381198:2329], ActorState: ExecuteState, TraceId: 01jtan1x5yes3qhtbzf9maywbn, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> YdbYqlClient::BuildInfo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-05-03T08:43:49.124870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137446945829360:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.124892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000126/r3tmp/tmpXE6eHT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11463, node 1 2025-05-03T08:43:49.220313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:49.225193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.225221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.226696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:49.226859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.226869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.226871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.226924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3535 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.276900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.451995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137446945830315:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:49.452026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137446945830307:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:49.452064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:49.452770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:49.457354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137446945830321:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:49.520741Z node 1 :TX_PROXY ERROR: Actor# [1:7500137446945830388:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:50.192347Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137451689866328:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.192600Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000126/r3tmp/tmpeIklMO/pdisk_1.dat 2025-05-03T08:43:50.226046Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20382, node 4 2025-05-03T08:43:50.232820Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.232833Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.232834Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.232887Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:50.292684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.292716Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.294224Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.308810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.543316Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137451689867269:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:50.543339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137451689867280:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:50.543345Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:50.543985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:50.547948Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137451689867283:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:50.613582Z node 4 :TX_PROXY ERROR: Actor# [4:7500137451689867354:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:51.248972Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137457554660702:2272];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.248990Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000126/r3tmp/tmpFunuzr/pdisk_1.dat 2025-05-03T08:43:51.267275Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16734, node 7 2025-05-03T08:43:51.282792Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.282806Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.282809Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.282852Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25835 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-03T08:43:51.348939Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.348970Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.350392Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.358327Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:51.554797Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.618900Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137457554661579:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.618901Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137457554661568:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.618913Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.619537Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:51.623188Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137457554661582:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:51.695527Z node 7 :TX_PROXY ERROR: Actor# [7:7500137457554661659:2762] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:51.709023Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1vy2ege9fmehnv1baprb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.722482Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1w11dt4gz08rc1kw9wse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.734643Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1w1d1587mj2h0gjqww0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.746171Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1w1tfgw3fgxn3hyxzn9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.757814Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jtan1w25fb4mtggmj28gtrsd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.844636Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jtan1w2h7daphnhe3vvhczga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:51.845973Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jtan1w2h7daphnhe3vvhczga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjA2OTVmYzgtNzhiOWJlYzMtZjY1ZDUyYjMtMWMxODA5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-05-03T08:43:52.514169Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137462604352828:2090];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.522605Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000126/r3tmp/tmpSuTel4/pdisk_1.dat 2025-05-03T08:43:52.531557Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11348, node 10 2025-05-03T08:43:52.558415Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.558450Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.558452Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.558518Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.613095Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.613136Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.614673Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.618472Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:52.621549Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:43:52.858376Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137462604353789:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.858399Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:52.863126Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> ReadRows::KillTabletDuringRead >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> YdbYqlClient::TestDoubleKey >> YdbYqlClient::TestTzTypesFullStack >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::SdkUuid >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-05-03T08:43:49.033055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137446313411275:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.033082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000129/r3tmp/tmp59sSWV/pdisk_1.dat 2025-05-03T08:43:49.104217Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3195, node 1 2025-05-03T08:43:49.122375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.122387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.122389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.122434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:49.133711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.133752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.135381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.179480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:16811 2025-05-03T08:43:49.200570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.213936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-03T08:43:49.217147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-03T08:43:49.217194Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-05-03T08:43:49.217746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.217783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.217833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.217859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:49.217888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:49.217923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-05-03T08:43:49.217943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:49.217967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:49.217990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:49.218012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:49.218034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:49.218059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137446313412292:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:49.218544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-03T08:43:49.220739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-03T08:43:49.221075Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-05-03T08:43:49.221650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.221664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.221701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.221718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:49.221734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:49.221754Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:49.221769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:49.221785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:49.221805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:49.221822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:49.221838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:49.221857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137446313412283:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:49.222295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7500137446313412285:2320];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-05-03T08:43:49.224395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7500137446313412285:2320];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-05-03T08:43:49.224421Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-05-03T08:43:49.224938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137446313412285:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.224949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137446313412285:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.224983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137446313412285:2320];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.225004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id= ... 
1746261833666:281474976715658] in PlanQueue unit at 72075186224037888 2025-05-03T08:43:53.622732Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1746261833666:281474976715658 keys extracted: 0 2025-05-03T08:43:53.622764Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-03T08:43:53.622782Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-05-03T08:43:53.622790Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-05-03T08:43:53.622884Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-05-03T08:43:53.622975Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-03T08:43:53.623264Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1746261833665 2025-05-03T08:43:53.623267Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-03T08:43:53.623422Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1746261833673 2025-05-03T08:43:53.623516Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1746261833666} 2025-05-03T08:43:53.623532Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-05-03T08:43:53.623659Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-05-03T08:43:53.623672Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-05-03T08:43:53.623681Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-05-03T08:43:53.623696Z node 13 :TX_DATASHARD DEBUG: Complete [1746261833666 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7500137465282386883:2207], exec latency: 0 ms, propose latency: 0 ms 2025-05-03T08:43:53.623702Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-05-03T08:43:53.623709Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-03T08:43:53.624135Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-05-03T08:43:53.624148Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8984;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-05-03T08:43:53.626703Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:7500137465282387627:2710], serverId# [13:7500137465282387628:2711], sessionId# [0:0:0] 2025-05-03T08:43:53.626741Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-05-03T08:43:53.628442Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 
2025-05-03T08:43:53.628457Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 SUCCESS Upsert done: 0.003672s 2025-05-03T08:43:53.630251Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137465282387636:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.630251Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137465282387647:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.630270Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.630916Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:53.632001Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-03T08:43:53.634671Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-05-03T08:43:53.635406Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137465282387650:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:53.713192Z node 13 :TX_PROXY ERROR: Actor# [13:7500137465282387723:2773] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:53.725833Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-03T08:43:53.725874Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2025-05-03T08:43:53.729283Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-03T08:43:53.729976Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1746261833778 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746261833778 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-05-03T08:43:53.729990Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-03T08:43:53.730016Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-05-03T08:43:53.730021Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-05-03T08:43:53.730029Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1746261833778:281474976715661] in PlanQueue unit at 72075186224037888 2025-05-03T08:43:53.730068Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1746261833778:281474976715661 keys extracted: 0 2025-05-03T08:43:53.730142Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-03T08:43:53.730842Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1746261833778} 2025-05-03T08:43:53.730861Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-05-03T08:43:53.730872Z node 13 :TX_DATASHARD DEBUG: Complete [1746261833778 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7500137465282387750:2787], exec latency: 0 ms, propose latency: 0 ms 2025-05-03T08:43:53.730878Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-03T08:43:53.731096Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1xwx1av700ydzgtazgcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDAyZjM2YTktNzAxNTE2OGEtNTQ0ODU5MmMtZGM1MTQ3Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-05-03T08:43:53.731700Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7500137465282387779:2074], tablet: [13:7500137465282387522:2336], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-05-03T08:53:53.731543Z 2025-05-03T08:43:53.731753Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7500137465282387779:2074], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7500137465282387522:2336], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-05-03T08:43:53.731762Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7500137465282387779:2074] 2025-05-03T08:43:53.731905Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-05-03T08:43:53.731918Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7500137465282387779:2074], scanId: 4, table: /Root/LogsX 2025-05-03T08:43:53.731923Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7500137465282387779:2074], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-05-03T08:43:53.731929Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7500137465282387779:2074], to: [13:7500137465282387774:2355], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-05-03T08:43:53.731993Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-05-03T08:43:53.732012Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-05-03T08:43:53.732020Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-05-03T08:43:53.732024Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-05-03T08:43:53.732028Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-05-03T08:43:53.733849Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261833778, txId: 281474976715661] shutting down 2025-05-03T08:43:53.780680Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1y0af9jbb8y6c47est9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZWY1NTNkNTUtZjllZmJkMDYtODkxYjk2NmMtNzgxNjRkMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8016;columns=9; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9; 2025-05-03T08:43:53.790824Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8984;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-05-03T08:43:53.791839Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST >> TGRpcYdbTest::CreateTableBadRequest >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> TTableProfileTests::OverwriteCompactionPolicy >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> ReadRows::KillTabletDuringRead [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> YdbYqlClient::CreateTableWithMESettings [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> ReadRows::KillTabletDuringRead [GOOD] Test command err: 2025-05-03T08:43:43.219954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137424063346763:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.219972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00014c/r3tmp/tmpC0i06Z/pdisk_1.dat 2025-05-03T08:43:43.295100Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22443, node 1 2025-05-03T08:43:43.320159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.320186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.324509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:43.329429Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:43.329443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:43.329444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:43.329481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:43.367720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:16334 TClient is connected to server localhost:16334 2025-05-03T08:43:43.413757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:43.746980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137424063347739:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.747006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137424063347750:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.747012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:43.747655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:43.751694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137424063347753:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-03T08:43:43.804014Z node 1 :TX_PROXY ERROR: Actor# [1:7500137424063347826:2678] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } TClient is connected to server localhost:16334 2025-05-03T08:43:43.856785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.768883Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:43:44.768927Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:43:44.768945Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00014c/r3tmp/tmpaMpA0z/pdisk_1.dat 2025-05-03T08:43:44.962580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.016944Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:45.016980Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:45.028466Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:45.299065Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1004:2808], Recipient [4:532:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-05-03T08:43:45.299090Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-05-03T08:43:45.299096Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-05-03T08:43:45.299116Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:1001:2806], Recipient [4:532:2446]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-03T08:43:45.299121Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-05-03T08:43:45.307288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-03T08:43:45.307352Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.307369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-03T08:43:45.307419Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-03T08:43:45.307459Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-03T08:43:45.307482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:43:45.307488Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-05-03T08:43:45.307496Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-05-03T08:43:45.307505Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:43:45.307513Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-03T08:43:45.308303Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-03T08:43:45.308337Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-05-03T08:43:45.308345Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-05-03T08:43:45.308352Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-05-03T08:43:45.308417Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:532:2446], Recipient [4:532:2446]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-05-03T08:43:45.308423Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-05-03T08:43:45.308456Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-03T08:43:45.308462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-03T08:43:45.308496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-03T08:43:45.308511Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-03T08:43:45.308514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:750:2598], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-05-03T08:43:45.308518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:750:2598], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-05-03T08:43:45.308633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:45.308644Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-05-03T08:43:45.308649Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 28147497 ... 
eteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82344 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-05-03T08:43:51.913106Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-05-03T08:43:51.913117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-05-03T08:43:51.913134Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2027 LastUpdateTime: 2027 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-05-03T08:43:51.913144Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-05-03T08:43:51.920709Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1w797dqvxsr4a0dbrkma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2E1ZDY0YWUtN2E3ZTMyYmMtNmM0MjhiMTQtYTNhYWZlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.448703Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500137460435440892:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.448756Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00014c/r3tmp/tmpDUvyDX/pdisk_1.dat 2025-05-03T08:43:52.473040Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15275, node 6 2025-05-03T08:43:52.497878Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.497890Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.497891Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.497958Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8817 WaitRootIsUp 'Root'... 
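Note on the ARROW_HELPER failures reported earlier in this section ("Ran out of field metadata, likely malformed" and "Offsets buffer size (bytes): 400 isn't large enough for length: 100"): the first negative case serializes 9 columns against a 10-column schema, and the second ships an offsets buffer that is too small for 100 rows. In the standard Arrow layout a variable-length string/binary column carries length + 1 four-byte offsets, so 100 rows need at least 404 bytes of offsets and a 400-byte buffer fails validation, which is exactly the BAD_REQUEST the negative test expects. A minimal standalone sketch of that arithmetic (plain C++, not YDB or Arrow library code):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Bytes required for the offsets buffer of an Arrow variable-length
// (utf8/binary) column: one 32-bit offset per value plus one trailing offset.
constexpr std::size_t RequiredOffsetsBytes(std::size_t rows) {
    return (rows + 1) * sizeof(int32_t);
}

int main() {
    const std::size_t rows = 100;          // rows in the rejected batch
    const std::size_t providedBytes = 400; // offsets buffer size from the log
    std::printf("need %zu bytes, have %zu -> %s\n",
                RequiredOffsetsBytes(rows), providedBytes,
                RequiredOffsetsBytes(rows) <= providedBytes ? "ok" : "invalid");
    return 0;
}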
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.550003Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.550045Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.551740Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.558040Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8817 2025-05-03T08:43:53.290807Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500137467244260906:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.290883Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00014c/r3tmp/tmpdX4xqn/pdisk_1.dat 2025-05-03T08:43:53.324027Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10024, node 9 2025-05-03T08:43:53.335275Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.335290Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.335291Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.335355Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27765 WaitRootIsUp 'Root'... 
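Several of the startup logs in this section schedule retries such as "Scheduled retry for error: { : Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }": the metadata tables are simply not ready yet when the actor first looks them up, so it retries instead of failing the test. Below is a hedged, generic sketch of that retry-until-ready pattern in plain C++; the attempt predicate, delays and attempt budget are illustrative assumptions, not the actual actor-system scheduling.

#include <algorithm>
#include <chrono>
#include <cstdio>
#include <functional>
#include <thread>

// Retry `attempt` until it succeeds or the attempt budget is exhausted,
// doubling the delay between tries up to a cap. Purely illustrative.
bool RetryWithBackoff(const std::function<bool()>& attempt,
                      int maxAttempts = 5,
                      std::chrono::milliseconds initialDelay = std::chrono::milliseconds(100),
                      std::chrono::milliseconds maxDelay = std::chrono::milliseconds(2000)) {
    auto delay = initialDelay;
    for (int i = 1; i <= maxAttempts; ++i) {
        if (attempt()) {
            return true;
        }
        std::printf("attempt %d failed, scheduling retry in %lld ms\n",
                    i, static_cast<long long>(delay.count()));
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, maxDelay);
    }
    return false;
}

int main() {
    int calls = 0;
    // Simulate a lookup that starts succeeding on the third call, like a
    // metadata table that appears shortly after the test cluster starts.
    const bool ok = RetryWithBackoff([&] { return ++calls >= 3; });
    std::printf("result: %s after %d calls\n", ok ? "ready" : "gave up", calls);
    return 0;
}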
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:53.391176Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.391228Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.392766Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:53.403022Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.417719Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:310:2353], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:43:54.417782Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:43:54.417793Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00014c/r3tmp/tmpgxGp1m/pdisk_1.dat 2025-05-03T08:43:54.519346Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.534412Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:54.576423Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.576457Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.586997Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.661534Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.858713Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:731:2613], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.858739Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:741:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.858747Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.859597Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:54.996115Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:745:2621], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:55.027634Z node 12 :TX_PROXY ERROR: Actor# [12:815:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.038467Z node 12 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jtan1z3ad0baprga2ta2jjxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=NDY4MzkxYzYtYWRjZDllZTctMjUyOTNlYjYtNDg0YTNkYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Stoping tablet id: 720751862240378882025-05-03T08:43:55.051482Z node 12 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Failed to connect to shard 72075186224037888 >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-05-03T08:43:50.579317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137450496225805:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.579361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011d/r3tmp/tmpwuGdmz/pdisk_1.dat 2025-05-03T08:43:50.681331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.681356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3909, node 1 2025-05-03T08:43:50.682967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.691226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:50.698612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.698626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.698628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.698665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19396 WaitRootIsUp 'Root'... 
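The recurring sequence in the logs above (KQP_WORKLOAD_SERVICE fails to fetch pool "default" with NOT_FOUND, creates it, then TX_PROXY reports "Check failed: path ... error: path exist, request accepts it") is a create-if-missing race: several sessions try to create the default resource pool at once, and the scheme shard accepts the duplicate request because the path already exists in the requested state, so these warnings are expected noise rather than test failures. A minimal sketch of treating "already exists" as success in a create-if-missing step (plain C++ toy model; the status values and behavior are illustrative, not the YDB API):

#include <cstdio>
#include <mutex>
#include <set>
#include <string>

enum class EStatus { Success, AlreadyExists };

// Toy "scheme shard": remembers which paths exist.
class TToyScheme {
public:
    EStatus CreatePath(const std::string& path) {
        std::lock_guard<std::mutex> lock(Mutex_);
        return Paths_.insert(path).second ? EStatus::Success
                                          : EStatus::AlreadyExists;
    }

private:
    std::mutex Mutex_;
    std::set<std::string> Paths_;
};

// Create-if-missing: a concurrent "path already exists" outcome is not an
// error, mirroring "path exist, request accepts it" in the log above.
bool EnsureDefaultPool(TToyScheme& scheme) {
    const auto st = scheme.CreatePath("/Root/.metadata/workload_manager/pools/default");
    return st == EStatus::Success || st == EStatus::AlreadyExists;
}

int main() {
    TToyScheme scheme;
    std::printf("first ensure: %s\n", EnsureDefaultPool(scheme) ? "ok" : "failed");
    std::printf("second ensure: %s\n", EnsureDefaultPool(scheme) ? "ok" : "failed");
    return 0;
}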
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:50.742168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.769019Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jtan1v3g1cfsdarmgz14zc54, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60042, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.999019s 2025-05-03T08:43:50.773747Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jtan1v3kb5vnpewvegpddar8, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60042, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:50.965154Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jtan1v9n06x7dzjxr5edn1z9, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60042, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:50.965310Z node 1 :TX_PROXY DEBUG: actor# [1:7500137450496226031:2140] Handle TEvProposeTransaction 2025-05-03T08:43:50.965324Z node 1 :TX_PROXY DEBUG: actor# [1:7500137450496226031:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-03T08:43:50.965341Z node 1 :TX_PROXY DEBUG: actor# [1:7500137450496226031:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7500137450496226748:2598] 2025-05-03T08:43:50.972467Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:60042" 2025-05-03T08:43:50.972492Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-03T08:43:50.972617Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-03T08:43:50.972632Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 
281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-03T08:43:50.972674Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-03T08:43:50.972717Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-03T08:43:50.972736Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-03T08:43:50.972799Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 HANDLE EvClientConnected 2025-05-03T08:43:50.973289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:50.974022Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-03T08:43:50.974037Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137450496226748:2598] txid# 281474976715658 SEND to# [1:7500137450496226747:2333] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-03T08:43:50.974256Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:50.974271Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:50.974284Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:50.974288Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:50.980948Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226778:2626], Recipient [1:7500137450496226913:2340]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.980948Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226790:2638], Recipient [1:7500137450496226915:2342]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981223Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226789:2637], Recipient [1:7500137450496226921:2348]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981227Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226781:2629], Recipient [1:7500137450496226919:2346]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981359Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226788:2636], Recipient [1:7500137450496226922:2349]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981365Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226786:2634], Recipient [1:7500137450496226920:2347]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981537Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226779:2627], Recipient [1:7500137450496226914:2341]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981552Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender 
[1:7500137450496226782:2630], Recipient [1:7500137450496226916:2343]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981673Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226791:2639], Recipient [1:7500137450496226907:2338]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981681Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226785:2633], Recipient [1:7500137450496226905:2336]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981808Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226777:2625], Recipient [1:7500137450496226941:2350]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981809Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226780:2628], Recipient [1:7500137450496226917:2344]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981938Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226784:2632], Recipient [1:7500137450496226953:2351]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.981951Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137450496226776:2624], Recipient [1:7500137450496226906:2337]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:43:50.982049Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137450496226784:2632], Recipient [1:7500137450496226953:2351]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:43:50.982065Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137450496226776:2624], Recipient [1:7500137450496226906:2337]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:43:50.982145Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7500137450496226906:2337] 2025-05-03T08:43:50.982145Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037899 actor [1:7500137450496226953:2351] 2025-05-03T08:43:50.982293Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-05-03T08:43:50.982300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-05-03T08:43:50.983249Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:7500137450496226776:2624], Recipient [1:7500137450496226906:2337]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-03T08:43:50.983272Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137450496226788:2636], Recipient [1:7500137450496226922:2349]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:43:50.983323Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:7500137450496226784:2632], Recipient [1:7500137450496226953:2351]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-03T08:43:50.983348Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137450496226789:2637], Recipient [1:7500137450496226921:2348]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:43:50.983352Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:7500137450496226922:2349] 2025-05-03T08:43:50.983398Z node 1 :TX_DATASHARD DEBU ... 
7571Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-05-03T08:43:54.717574Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-05-03T08:43:54.717577Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for WaitForStreamClearance 2025-05-03T08:43:54.717579Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit WaitForStreamClearance 2025-05-03T08:43:54.717582Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-05-03T08:43:54.717584Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-05-03T08:43:54.717586Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-05-03T08:43:54.717588Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-05-03T08:43:54.717590Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-05-03T08:43:54.717619Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-05-03T08:43:54.717626Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-05-03T08:43:54.717628Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-05-03T08:43:54.717630Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-05-03T08:43:54.717632Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-05-03T08:43:54.717636Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-05-03T08:43:54.717691Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7500137468307810333:2083], Recipient [10:7500137468307809373:2337]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-05-03T08:43:54.717698Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-05-03T08:43:54.717707Z node 10 :READ_TABLE_API DEBUG: [10:7500137468307810315:2397] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-05-03T08:43:54.717717Z node 10 :READ_TABLE_API DEBUG: [10:7500137468307810315:2397] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-05-03T08:43:54.717746Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-05-03T08:43:54.717794Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-05-03T08:43:54.717812Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-05-03T08:43:54.717816Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2025-05-03T08:43:54.717838Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-05-03T08:43:54.717839Z node 10 :READ_TABLE_API DEBUG: [10:7500137468307810315:2397] got stream part, size: 75, RU required: 128 rate limiter absent 2025-05-03T08:43:54.717848Z node 10 
:TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-05-03T08:43:54.717869Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7500137468307810316:2397], Recipient [10:7500137468307809373:2337]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-05-03T08:43:54.717871Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-05-03T08:43:54.717873Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-05-03T08:43:54.717895Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-05-03T08:43:54.717925Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7500137468307810316:2397], Recipient [10:7500137468307809373:2337]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-05-03T08:43:54.717927Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-05-03T08:43:54.717950Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137468307810316:2397], Recipient [10:7500137468307809373:2337]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261834765 TxId: 281474976715680 2025-05-03T08:43:54.717985Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7500137468307809373:2337], Recipient [10:7500137468307809373:2337]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-03T08:43:54.717991Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-03T08:43:54.717994Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-05-03T08:43:54.717997Z node 10 :READ_TABLE_API DEBUG: [10:7500137468307810315:2397] Starting inactivity timer for 600.000000s with tag 3 2025-05-03T08:43:54.717997Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-05-03T08:43:54.718001Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-05-03T08:43:54.718004Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-05-03T08:43:54.718009Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-05-03T08:43:54.718011Z node 10 :READ_TABLE_API NOTICE: [10:7500137468307810315:2397] Finish grpc stream, status: 400000 2025-05-03T08:43:54.718014Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-05-03T08:43:54.718016Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-05-03T08:43:54.718019Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-05-03T08:43:54.718020Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-05-03T08:43:54.718040Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayCompleteNoMoreRestarts 2025-05-03T08:43:54.718042Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 
executing on unit FinishPropose 2025-05-03T08:43:54.718044Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-05-03T08:43:54.718046Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-05-03T08:43:54.718051Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-05-03T08:43:54.718053Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-05-03T08:43:54.718054Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-05-03T08:43:54.718056Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-03T08:43:54.718058Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-05-03T08:43:54.718059Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-05-03T08:43:54.718061Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-05-03T08:43:54.718848Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jtan1yyy5vn099k49a8cdk8a, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# unknown, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.009183s 2025-05-03T08:43:54.719035Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6edf00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719043Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6d1800] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719098Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6e2100] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719123Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6c5000] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719153Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6c5500] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719173Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6c1e00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719195Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6ce100] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719219Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6d6800] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719242Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6f0700] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719257Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6cf500] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719263Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-05-03T08:43:54.719267Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-05-03T08:43:54.719273Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-05-03T08:43:54.719278Z 
node 10 :GRPC_SERVER DEBUG: [0x448a7f6c0a00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719289Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-05-03T08:43:54.719318Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6c5a00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719322Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6d2700] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719355Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6c0500] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719360Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6d1300] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719383Z node 10 :GRPC_SERVER DEBUG: [0x448a7ecc5800] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-03T08:43:54.719391Z node 10 :GRPC_SERVER DEBUG: [0x448a7f6eda00] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-05-03T08:43:50.449998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137453452819755:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.450022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000120/r3tmp/tmpHbE6eK/pdisk_1.dat 2025-05-03T08:43:50.509644Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1406, node 1 2025-05-03T08:43:50.530529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.530539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.530540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.530576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:50.550609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.550637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:10767 WaitRootIsUp 'Root'... 
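The TX_DATASHARD trace in the TestReadTableMultiShardWithDescribeAndRowLimit log above walks one operation through a chain of execution units (WaitForStreamClearance, ReadTableScan, FinishPropose, CompletedOperations); each unit reports a status such as Executed, Continue or DelayCompleteNoMoreRestarts, and that status decides whether the plan advances or the operation waits for the scan to deliver its data. The sketch below is only an illustrative model of that unit/status loop in plain C++, not YDB's actual datashard pipeline:

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

enum class EUnitStatus { Executed, Continue, DelayCompleteNoMoreRestarts };

struct TUnit {
    std::string Name;
    EUnitStatus Status; // status this unit reports when executed (illustrative)
};

// Advance one operation through its unit chain, logging roughly the way the
// datashard trace does. In this toy model a Continue status is only noted;
// the real shard would park the operation until the scan finishes.
void RunPipeline(const std::string& opId, const std::vector<TUnit>& units) {
    for (std::size_t i = 0; i < units.size(); ++i) {
        const TUnit& unit = units[i];
        std::printf("Trying to execute [%s] on unit %s\n", opId.c_str(), unit.Name.c_str());
        if (unit.Status == EUnitStatus::Continue) {
            std::printf("Execution status for [%s] is Continue (scan in progress)\n",
                        opId.c_str());
        }
        std::printf("Advance execution plan for [%s] past unit %s\n",
                    opId.c_str(), unit.Name.c_str());
    }
    std::printf("Execution plan for [%s] has finished\n", opId.c_str());
}

int main() {
    RunPipeline("0:281474976715681",
                {{"WaitForStreamClearance", EUnitStatus::Executed},
                 {"ReadTableScan", EUnitStatus::Continue},
                 {"FinishPropose", EUnitStatus::DelayCompleteNoMoreRestarts},
                 {"CompletedOperations", EUnitStatus::Executed}});
    return 0;
}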
TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:50.552096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:50.563766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.764529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.519391Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137456119411175:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.519426Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000120/r3tmp/tmpJgi1aO/pdisk_1.dat 2025-05-03T08:43:51.541935Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2476, node 4 2025-05-03T08:43:51.557578Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.557600Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.557602Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.557649Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.619649Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.619674Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.621165Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.630654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:51.904875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.921778Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.933858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.948239Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.544568Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137460034401514:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.544598Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000120/r3tmp/tmpDdDiMc/pdisk_1.dat 2025-05-03T08:43:52.562184Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27686, node 7 2025-05-03T08:43:52.579294Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.579302Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.579303Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.579332Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.645054Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.645094Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.649454Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.649536Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:52.867453Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.927360Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.934310Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:43:53.600173Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137465764808759:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.600209Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000120/r3tmp/tmp68LCtD/pdisk_1.dat 2025-05-03T08:43:53.617585Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6773, node 10 2025-05-03T08:43:53.629867Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.629880Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.629882Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.629933Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:53.700436Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.700468Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.702024Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:53.704658Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:53.937784Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:53.998525Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.000795Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-05-03T08:43:54.000809Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-05-03T08:43:54.666374Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137467915276302:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.666397Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000120/r3tmp/tmpfy1ArA/pdisk_1.dat 2025-05-03T08:43:54.680898Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4553, node 13 2025-05-03T08:43:54.693864Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.693876Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.693894Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.693936Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13761 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.764827Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.764849Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.766268Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.770059Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.926774Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] Test command err: 2025-05-03T08:43:51.150122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137454741954618:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.150206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000116/r3tmp/tmpM3wBJc/pdisk_1.dat 2025-05-03T08:43:51.257636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.257659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.259047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11623, node 1 2025-05-03T08:43:51.274555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:51.274996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.275007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.275013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.275049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22017 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.329491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:51.342886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.091101Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137460479765394:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.091154Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000116/r3tmp/tmpfAWvxN/pdisk_1.dat 2025-05-03T08:43:52.112309Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62455, node 4 2025-05-03T08:43:52.132295Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.132306Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.132307Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.132352Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:52.191376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.191410Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.192936Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.198166Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:52.219446Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:52.901454Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137462396912699:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.901599Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000116/r3tmp/tmpMVcUCY/pdisk_1.dat 2025-05-03T08:43:52.919132Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11564, node 7 2025-05-03T08:43:52.938079Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.938090Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.938092Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.938131Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:53.001898Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.001938Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.003392Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:53.011852Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:53.041365Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:53.766404Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137466704506108:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.766434Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000116/r3tmp/tmp0dYuuN/pdisk_1.dat 2025-05-03T08:43:53.790155Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15521, node 10 2025-05-03T08:43:53.814854Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.814868Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.814870Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.814933Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:53.866661Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.866692Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.868232Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:53.878038Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:53.893863Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.624156Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137469964673043:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.624176Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000116/r3tmp/tmp3uf7aR/pdisk_1.dat 2025-05-03T08:43:54.637821Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14567, node 13 2025-05-03T08:43:54.660563Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.660575Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.660577Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.660613Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:54.727252Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.727282Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.728203Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.728529Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:43:54.739946Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-05-03T08:43:50.746834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137451284896340:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:50.746866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011a/r3tmp/tmpS2zgvG/pdisk_1.dat 2025-05-03T08:43:50.820531Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4362, node 1 2025-05-03T08:43:50.851475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:50.851497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:50.856052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:50.870229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:50.870243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:50.870245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:50.870285Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:50.911002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:50.925095Z node 1 :TX_PROXY ERROR: Actor# [1:7500137451284897245:2593] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-05-03T08:43:51.582672Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137457463751944:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:51.582697Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011a/r3tmp/tmpnz54Gs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23502, node 4 2025-05-03T08:43:51.616144Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:51.634731Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:51.634746Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:51.634750Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:51.634788Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:51.683087Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:51.683113Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:51.684616Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:51.691506Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:51.706969Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.732798Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:51.887701Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137457463753318:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.887718Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137457463753307:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.887792Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:51.888582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:51.892505Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137457463753321:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-03T08:43:51.944208Z node 4 :TX_PROXY ERROR: Actor# [4:7500137457463753396:3001] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:52.031846Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1w6f1dck0b5ekv1y0120, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTRhOGJkMDAtY2Q2OGZmZGMtNDBlNzRhYmEtZDIwNmMzODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.033783Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1w6f1dck0b5ekv1y0120, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTRhOGJkMDAtY2Q2OGZmZGMtNDBlNzRhYmEtZDIwNmMzODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.054292Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan1wb67hbnmbcfvvnk88wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmQ2ODExN2UtOTdlNmUyMzMtMmY2NjU5OTgtOTQ1YjBmMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:52.641059Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137460531016491:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:52.641081Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011a/r3tmp/tmpzJEz95/pdisk_1.dat 2025-05-03T08:43:52.654345Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2261, node 7 2025-05-03T08:43:52.674272Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:52.674286Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:52.674288Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:52.674339Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9281 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:52.741416Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:52.741465Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:52.742891Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:52.743690Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:53.717669Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137463421555295:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.717905Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011a/r3tmp/tmp1N91by/pdisk_1.dat 2025-05-03T08:43:53.733649Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5648, node 10 2025-05-03T08:43:53.765990Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.766001Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.766002Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.766043Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:53.818138Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.818176Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.819697Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:53.824509Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.002483Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137467716523513:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.002503Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.002542Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137467716523525:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.003179Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:54.006171Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137467716523527:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:54.063360Z node 10 :TX_PROXY ERROR: Actor# [10:7500137467716523606:2654] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:54.080413Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7500137467716523635:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-03T08:43:54.080507Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=M2RhNjIyMzEtNmE0ZTUyZTUtOWMyNzhlMDQtNDFkYTRmYWE=, ActorId: [10:7500137467716523495:2329], ActorState: ExecuteState, TraceId: 01jtan1yawc60h9vx2we4c58nk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-03T08:43:54.575507Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137470801611154:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.575589Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00011a/r3tmp/tmpiTzp11/pdisk_1.dat 2025-05-03T08:43:54.591368Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13746, node 13 2025-05-03T08:43:54.607743Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.607758Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.607759Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.607807Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.675701Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.675730Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.677202Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.679256Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:54.691542Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.754284Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:54.928525Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137470801612349:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.928544Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137470801612341:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.928560Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.929194Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:54.932980Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137470801612355:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-03T08:43:55.031632Z node 13 :TX_PROXY ERROR: Actor# [13:7500137475096579723:2856] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.045745Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan1z5g9s8c87xchz1scphp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWYyZWNiNGUtYjAyMDJmNjgtZWM0YTc1MDgtOTI3MzhhMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:55.056488Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan1z97a62cf5q5jdpbamma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWYyZWNiNGUtYjAyMDJmNjgtZWM0YTc1MDgtOTI3MzhhMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> YdbOlapStore::LogNonExistingUserId [GOOD] >> YdbOlapStore::LogPagingBefore >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TGRpcYdbTest::ReadTable >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> ClientStatsCollector::PrepareQuery >> YdbYqlClient::SecurityTokenAuth >> YdbYqlClient::TestColumnOrder >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> GrpcConnectionStringParserTest::NoDatabaseFlag >> YdbTableBulkUpsert::Simple >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::ReadTablePg >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> YdbYqlClient::SecurityTokenAuth [GOOD] >> YdbYqlClient::RetryOperationTemplate >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession >> 
YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> YdbYqlClient::TestReadTableSnapshot [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingRequest >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-05-03T08:43:53.521665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137465411104447:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.521787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000113/r3tmp/tmpW5GFZv/pdisk_1.dat 2025-05-03T08:43:53.584520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6123, node 1 2025-05-03T08:43:53.603443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.603454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.603455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.603496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:53.622153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.622186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.623659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:53.631114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:53.877201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137465411105380:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.877236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.913290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:53.979334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137465411105548:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.979357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.979397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137465411105553:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:53.980031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:53.983360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137465411105555:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-03T08:43:54.061597Z node 1 :TX_PROXY ERROR: Actor# [1:7500137469706072922:2766] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:54.074204Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jtan1y7v9w68pgadxycst7ze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM3ZTcwNGUtYTRiNDZmM2MtN2IxZjg3MjAtNWVjMzQ4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:54.096612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jtan1yayfb8yzpfdr1m5zkc6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM3ZTcwNGUtYTRiNDZmM2MtN2IxZjg3MjAtNWVjMzQ4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:54.098833Z node 1 :TX_PROXY ERROR: [ReadTable [1:7500137469706072983:2364] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-05-03T08:43:54.099677Z node 1 :TX_PROXY ERROR: [ReadTable [1:7500137469706072986:2365] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-05-03T08:43:54.775186Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137468152524162:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.775865Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000113/r3tmp/tmpOltZgV/pdisk_1.dat 2025-05-03T08:43:54.789476Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12028, node 4 2025-05-03T08:43:54.805798Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.805810Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.805812Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.805859Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27311 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.875108Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.875148Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.876685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.878215Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.159894Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137472447492248:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.159931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.160956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.224523Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137472447492411:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.224548Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137472447492416:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.224569Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.225215Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:55.228667Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137472447492418:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:55.302288Z node 4 :TX_PROXY ERROR: Actor# [4:7500137472447492491:2767] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_crea ... 72057594046644480 2025-05-03T08:43:56.379296Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137477607763228:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.379321Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.379336Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137477607763233:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.379988Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:56.390378Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137477607763235:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:56.448426Z node 7 :TX_PROXY ERROR: Actor# [7:7500137477607763314:2774] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.456588Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan20jvfh67s04pf3vw1tey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ODllNGQ1NDQtYzMwMjFjMjItZmIzNzZlYjAtYjI4NWNiMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:56.470912Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan20nb3kdn9f1d42sq8gx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ODllNGQ1NDQtYzMwMjFjMjItZmIzNzZlYjAtYjI4NWNiMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:57.079937Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137484455837508:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.079958Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000113/r3tmp/tmp7Xo0n0/pdisk_1.dat 2025-05-03T08:43:57.101141Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65347, node 10 2025-05-03T08:43:57.112772Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.112784Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.112788Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.112823Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:57.180578Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.180610Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.182133Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.183703Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.193776Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jtan21c94mndyh8acz179pqb, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44542, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.999262s 2025-05-03T08:43:57.194880Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jtan21caay26f03bzk5ndv1r, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44542, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.376343Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01jtan21j01y5h92g6b0hnkapf, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44542, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.378510Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137484455838454:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.378532Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.379875Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.380820Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.380823Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.380852Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.380852Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.436545Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.436545Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.436572Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.436581Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.439299Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan21kzb6ayex8n1gw3909p, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44542, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.447570Z node 10 :READ_TABLE_API DEBUG: [10:7500137484455838615:2343] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2025-05-03T08:43:57.447587Z node 10 :READ_TABLE_API DEBUG: [10:7500137484455838615:2343] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2025-05-03T08:43:57.447757Z node 10 :READ_TABLE_API DEBUG: [10:7500137484455838615:2343] got stream part, size: 35, RU required: 128 rate limiter absent 2025-05-03T08:43:57.447901Z node 10 :READ_TABLE_API DEBUG: [10:7500137484455838615:2343] Starting inactivity timer for 600.000000s with tag 3 2025-05-03T08:43:57.447916Z node 10 :READ_TABLE_API NOTICE: [10:7500137484455838615:2343] Finish grpc stream, status: 400000 2025-05-03T08:43:57.448734Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jtan21m8a3h90vwjf72abkn0, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# unknown, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.009300s 2025-05-03T08:43:57.448867Z node 10 :GRPC_SERVER DEBUG: [0x45913f6d7c00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.448886Z node 10 :GRPC_SERVER DEBUG: [0x45913f6d4f00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.448924Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e8500] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.448937Z node 10 :GRPC_SERVER DEBUG: [0x45913f6de000] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.448953Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e0300] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.448986Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e4400] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449002Z node 10 :GRPC_SERVER DEBUG: [0x45913f6dfe00] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449052Z node 10 :GRPC_SERVER DEBUG: [0x45913f6c0a00] 
received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449052Z node 10 :GRPC_SERVER DEBUG: [0x45913f6d2c00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449083Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e1200] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449114Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e2b00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449117Z node 10 :GRPC_SERVER DEBUG: [0x45913f6dc700] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449144Z node 10 :GRPC_SERVER DEBUG: [0x45913f6d0400] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449162Z node 10 :GRPC_SERVER DEBUG: [0x45913f6c3200] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449177Z node 10 :GRPC_SERVER DEBUG: [0x45913f6dd100] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449199Z node 10 :GRPC_SERVER DEBUG: [0x45913eccd100] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.449207Z node 10 :GRPC_SERVER DEBUG: [0x45913f6e9400] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> YdbYqlClient::TestReadWrongTable [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-05-03T08:43:54.339158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137467536721301:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.339177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000106/r3tmp/tmpXWuaHj/pdisk_1.dat 2025-05-03T08:43:54.394040Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3837, node 1 2025-05-03T08:43:54.408667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.408679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.408681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.408722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.435260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.439142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.439166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.440431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.598403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137467536722256:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.598418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137467536722251:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.598467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.599047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:54.602204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137467536722265:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-03T08:43:54.665859Z node 1 :TX_PROXY ERROR: Actor# [1:7500137467536722338:2660] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.409253Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137473876423086:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.409290Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000106/r3tmp/tmpmdAbp7/pdisk_1.dat 2025-05-03T08:43:55.421943Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28916, node 4 2025-05-03T08:43:55.435169Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.435182Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.435184Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.435226Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.509778Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.509806Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.511453Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:55.512830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.701855Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473876424030:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.701856Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473876424038:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.701876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.702621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:55.711891Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137473876424044:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:55.768768Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473876424115:2653] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.414366Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137476408997413:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.414385Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000106/r3tmp/tmpQkzi0r/pdisk_1.dat 2025-05-03T08:43:56.427980Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17542, node 7 2025-05-03T08:43:56.442668Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.442683Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.442685Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.442714Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.514801Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.514850Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.516356Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:56.517873Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:56.693513Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476408998350:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.693533Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.696457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:56.759910Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476408998513:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.759917Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476408998518:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.759940Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.760506Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:56.768778Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137476408998520:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:56.853780Z node 7 :TX_PROXY ERROR: Actor# [7:7500137476408998593:2765] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.862031Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan20yqetsva2w31zs1e4ah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2RkYThiYTEtZTQ1YmRlMTYtOTFlMDA4NTYtM2NkNzM1ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:56.881058Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan2129566623wjby3anqr7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzI4MDZjYzEtZWU0NTg1YWItYmEyMGIyNmItNTA0MDZjZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:56.893634Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=M2RkYThiYTEtZTQ1YmRlMTYtOTFlMDA4NTYtM2NkNzM1ZWM=, ActorId: [7:7500137476408998347:2332], ActorState: ExecuteState, TraceId: 01jtan212m37k8jnfacr9xczkh, Create QueryResponse for error on request, msg: 2025-05-03T08:43:57.466735Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137483392077165:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.466765Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000106/r3tmp/tmpiQBh0r/pdisk_1.dat 2025-05-03T08:43:57.477976Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28985, node 10 2025-05-03T08:43:57.490274Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.490283Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.490285Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.490339Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.567010Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.567037Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.568611Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.569735Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.579527Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jtan21rbf2esh5wq95dgrn0j, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59292, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.999509s 2025-05-03T08:43:57.580278Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jtan21rc15vsr20747gec12y, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59292, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.783435Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan21yq5k2x5mkrtdj63kp6, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59292, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.783767Z node 10 :TX_PROXY ERROR: [ReadTable [10:7500137483392078100:2333] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-05-03T08:43:57.783795Z node 10 :TX_PROXY ERROR: [ReadTable [10:7500137483392078100:2333] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-05-03T08:43:57.783937Z node 10 :READ_TABLE_API NOTICE: [10:7500137483392078099:2333] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-05-03T08:43:57.784639Z node 10 :GRPC_SERVER DEBUG: [0x17caff6e8500] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784705Z node 10 :GRPC_SERVER DEBUG: [0x17caff6def00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784749Z node 10 :GRPC_SERVER DEBUG: [0x17caff6e7b00] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784752Z node 10 :GRPC_SERVER DEBUG: [0x17caff6e4e00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784789Z node 10 :GRPC_SERVER DEBUG: [0x17caff6dea00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784814Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d0400] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784824Z node 10 :GRPC_SERVER DEBUG: [0x17caff6e3000] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784853Z node 10 :GRPC_SERVER DEBUG: [0x17caff6e3500] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784858Z node 10 :GRPC_SERVER DEBUG: [0x17caff6c0500] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784890Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d7200] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784893Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d6300] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784926Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d7700] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784947Z node 10 :GRPC_SERVER DEBUG: [0x17caff6ccd00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784962Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d3100] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.784983Z node 10 :GRPC_SERVER DEBUG: [0x17caff6ce600] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.785002Z node 10 :GRPC_SERVER DEBUG: [0x17cafecc0b00] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-03T08:43:57.785020Z node 10 :GRPC_SERVER DEBUG: [0x17caff6d8600] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
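(Side note on the DeprecationWarning printed by the dml py3tests above: it comes from the Python client code used by those tests and already names the fix, namely switching from datetime.datetime.utcfromtimestamp() to a timezone-aware datetime.datetime.fromtimestamp(..., UTC). The following is a minimal illustrative sketch of that migration only; it is not taken from ydb/types.py, and the timestamp value is made up.)

    import datetime

    ts = 1746261837  # hypothetical Unix timestamp, for illustration only

    # Deprecated form: returns a naive datetime whose wall time is UTC
    naive_utc = datetime.datetime.utcfromtimestamp(ts)

    # Suggested replacement: returns an aware datetime with an explicit UTC tzinfo
    # (datetime.UTC is a Python 3.11+ alias for datetime.timezone.utc)
    aware_utc = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)

    # Same wall-clock value; only the tzinfo differs
    assert aware_utc.replace(tzinfo=None) == naive_utc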
>> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy >> TGRpcYdbTest::ReadTablePg [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationSync >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] Test command err: 2025-05-03T08:43:54.936030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137468648498479:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.936055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fc/r3tmp/tmphHkICo/pdisk_1.dat 2025-05-03T08:43:55.027153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:55.038137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.038173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.039312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13904, node 1 2025-05-03T08:43:55.061404Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.061431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.061433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.061466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20052 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.106517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.784902Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137474724601609:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.786396Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fc/r3tmp/tmpbXNro3/pdisk_1.dat 2025-05-03T08:43:55.803664Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4757, node 4 2025-05-03T08:43:55.837741Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.837753Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.837755Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.837796Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:55.889971Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.890002Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.891295Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:55.891322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:56.622297Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137476816703737:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.622318Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fc/r3tmp/tmp1qx2Kv/pdisk_1.dat 2025-05-03T08:43:56.637729Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20920, node 7 2025-05-03T08:43:56.652311Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.652323Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.652324Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.652364Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.722429Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.722464Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.723955Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:56.725152Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:57.299221Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137481487408199:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.299246Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fc/r3tmp/tmp3G1jzr/pdisk_1.dat 2025-05-03T08:43:57.314482Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27058, node 10 2025-05-03T08:43:57.333395Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.333417Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.333419Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.333455Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.399809Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.399839Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.401303Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.401467Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:57.945884Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137481244630797:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fc/r3tmp/tmpdVnIDK/pdisk_1.dat 2025-05-03T08:43:57.952496Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:57.970024Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9629, node 13 2025-05-03T08:43:57.991845Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.991865Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.991867Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.991911Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:58.045420Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:58.045444Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:58.046910Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:58.048487Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-05-03T08:43:54.844000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137470569816975:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.844103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fe/r3tmp/tmpWqwb8M/pdisk_1.dat 2025-05-03T08:43:54.911611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28212, node 1 2025-05-03T08:43:54.931160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.931182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.931185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.931219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:54.944436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.944475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.945841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.964114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:55.663795Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137473443138471:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.663897Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fe/r3tmp/tmpiQCuo8/pdisk_1.dat 2025-05-03T08:43:55.678837Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4355, node 4 2025-05-03T08:43:55.697567Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.697580Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.697582Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.697621Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.769264Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.769295Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.770205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.770945Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:43:55.795635Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473443139367:2586] txid# 281474976715658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-05-03T08:43:56.313169Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137476079359238:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.313265Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fe/r3tmp/tmpSOTyNw/pdisk_1.dat 2025-05-03T08:43:56.328916Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1893, node 7 2025-05-03T08:43:56.339280Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.339293Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.339295Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.339334Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.413314Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.413358Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.414801Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:56.415370Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fe/r3tmp/tmp2fC2tS/pdisk_1.dat 2025-05-03T08:43:56.977336Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:43:56.990663Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9035, node 10 2025-05-03T08:43:57.009466Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.009477Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.009478Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.009527Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.073319Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.073367Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.074804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.076933Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:57.088680Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.153055Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.205720Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-05-03T08:43:57.836002Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137480228854276:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.836032Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fe/r3tmp/tmpAe7mCi/pdisk_1.dat 2025-05-03T08:43:57.849634Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10689, node 13 2025-05-03T08:43:57.868245Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.868260Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.868262Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.868310Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.936512Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.936549Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.938051Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.939562Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ReadTablePg [GOOD] Test command err: 2025-05-03T08:43:53.684148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137466014426921:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:53.684170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00010b/r3tmp/tmpwxnCvP/pdisk_1.dat 2025-05-03T08:43:53.747387Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5302, node 1 2025-05-03T08:43:53.775542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:53.775558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:53.775561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:53.775604Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:53.784741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:53.784773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:53.786582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:53.820297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:53.837268Z node 1 :TX_PROXY ERROR: Actor# [1:7500137466014427832:2601] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-05-03T08:43:54.506261Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137469277785013:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.506378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00010b/r3tmp/tmphUxWLt/pdisk_1.dat 2025-05-03T08:43:54.523009Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22452, node 4 2025-05-03T08:43:54.541583Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.541593Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.541594Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.541631Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.606570Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.606596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.608243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.609852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.850736Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137469277785972:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.850742Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137469277785961:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.850755Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.851481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:54.855277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137469277785975:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:54.947966Z node 4 :TX_PROXY ERROR: Actor# [4:7500137469277786048:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.568820Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137472445373952:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.568983Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00010b/r3tmp/tmp1zRsLm/pdisk_1.dat 2025-05-03T08:43:55.585586Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26422, node 7 2025-05-03T08:43:55.605885Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.605900Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.605902Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.605963Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.669557Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.669585Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.671167Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:55.674396Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.878119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137472445374910:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.878144Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137472445374899:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.878163Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.878827Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:55.885096Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137472445374913:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: ... : 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.794292Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.794315Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.795807Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.797432Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.808307Z node 13 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jtan21zgadmssvq16q528qbs, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:57.809137Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.809972Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.809999Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.810002Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.810011Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.865930Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.865966Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.865970Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:57.865983Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:57.867814Z node 13 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jtan221b9gbjvvmpmd4xfqfn, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.002335Z node 13 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jtan225j5zntmbfv7jc8vgmb, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.002612Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137487681814499:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.002628Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137487681814509:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.002635Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.003360Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:58.004513Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:58.004554Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:58.004562Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:58.004574Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:58.006745Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:58.006765Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:43:58.006774Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:58.006783Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:43:58.007593Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137487681814513:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:58.101556Z node 13 :TX_PROXY ERROR: Actor# [13:7500137487681814585:2754] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:58.115965Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan225j5zntmbfv7jc8vgmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZWM4NTM5OTMtOTI4OGMzM2EtNDVhZDQzNmMtYjRlYWE3Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:58.119366Z node 13 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan2297b3yh005m6qpcrtey, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.119486Z node 13 :READ_TABLE_API NOTICE: [13:7500137487681814629:2348] Finish grpc stream, status: 400010 2025-05-03T08:43:58.120211Z node 13 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan22985mjnq5wn5jntd3ab, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.124002Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814630:2349] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-05-03T08:43:58.124016Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814630:2349] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-05-03T08:43:58.124292Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814630:2349] got stream part, size: 246, RU required: 128 rate limiter absent 2025-05-03T08:43:58.124423Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814630:2349] Starting inactivity timer for 600.000000s with tag 3 2025-05-03T08:43:58.124548Z node 13 :READ_TABLE_API NOTICE: [13:7500137487681814630:2349] Finish grpc stream, status: 400000 2025-05-03T08:43:58.125066Z node 13 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan229ddwdnacs8anvesyr1, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.134027Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814652:2351] Adding quota request to queue ShardId: 0, TxId: 281474976715664 2025-05-03T08:43:58.134057Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814652:2351] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2025-05-03T08:43:58.134220Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814652:2351] got stream part, size: 84, RU required: 128 rate limiter absent 2025-05-03T08:43:58.134380Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814652:2351] Starting inactivity timer for 600.000000s with tag 3 2025-05-03T08:43:58.134493Z node 13 :READ_TABLE_API NOTICE: [13:7500137487681814652:2351] Finish grpc stream, status: 400000 2025-05-03T08:43:58.134973Z node 13 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jtan229pfchjps0tk3070ac0, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# 
ipv6:[::1]:47252, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:43:58.140645Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814674:2353] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2025-05-03T08:43:58.140673Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814674:2353] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-05-03T08:43:58.140890Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814674:2353] got stream part, size: 210, RU required: 128 rate limiter absent 2025-05-03T08:43:58.140994Z node 13 :READ_TABLE_API DEBUG: [13:7500137487681814674:2353] Starting inactivity timer for 600.000000s with tag 3 2025-05-03T08:43:58.141107Z node 13 :READ_TABLE_API NOTICE: [13:7500137487681814674:2353] Finish grpc stream, status: 400000 2025-05-03T08:43:58.141641Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d1300] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141640Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6e0d00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141702Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d7200] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141722Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6da900] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141757Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6c0500] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141761Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d0400] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141789Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6c2800] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141798Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d9000] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141818Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d5900] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141828Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d1d00] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141867Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d4a00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141888Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6d0900] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141903Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6e1700] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141945Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6ce600] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141949Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6e1200] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141978Z node 13 :GRPC_SERVER DEBUG: [0x14c6ff6e8000] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-05-03T08:43:58.141978Z node 13 :GRPC_SERVER DEBUG: [0x14c6fecc9a00] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> 
YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Overload >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingAfter >> TYqlDateTimeTests::SimpleUpsertSelect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000109/r3tmp/tmpBWyP1u/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61667, node 1 TClient is connected to server localhost:14351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-03T08:43:54.558709Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137469008685928:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.558730Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000109/r3tmp/tmpJZHR7S/pdisk_1.dat 2025-05-03T08:43:54.577191Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8374, node 4 2025-05-03T08:43:54.595995Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.596007Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.596010Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.596046Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.659319Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.659339Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:54.661694Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:54.662377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.938867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137469008686872:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.938909Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.980496Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.044009Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473303654336:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.044032Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.044075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473303654341:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.044688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:55.047303Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137473303654343:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:55.116658Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473303654418:2776] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.131933Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan1z934z98mdjsx1w4b0c7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDhlZjcxNDItNDhhYTM0YjctZmQ2OGQ0ODctMjNlYTMyNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:55.142760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-03T08:43:55.156707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.244046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.834106Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137471912311448:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.834195Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000109/r3tmp/tmpZMUmM0/pdisk_1.dat 2025-05-03T08:43:55.853179Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27901, node 7 2025-05-03T08:43:55.865518Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.865528Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.865531Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.865565Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8590 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.938316Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.938343Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.939084Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.939611Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:43:56.115479Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476207279690:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.115503Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.118725Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:56.179384Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476207279853:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.179410Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.179413Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137476207279858:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.180028Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:56.187779Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137476207279860:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:56.283935Z node 7 :TX_PROXY ERROR: Actor# [7:7500137476207279937:2771] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.291840Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan20ck05v31y2n2zgw3k7p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTc0YzcxMjYtOWM3MzYwZjgtYmIwZDc0MDktNzVkZTBlNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:56.299186Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-03T08:43:56.317849Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-03T08:43:56.899213Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137476791799968:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.899255Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000109/r3tmp/tmpLVxAJn/pdisk_1.dat 2025-05-03T08:43:56.919489Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32363, node 10 2025-05-03T08:43:56.931647Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.931662Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.931664Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.931698Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:56.999701Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.999726Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.001396Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.004570Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.245511Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.754960Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137480514860563:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.754979Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000109/r3tmp/tmpx9EM5T/pdisk_1.dat 2025-05-03T08:43:57.771560Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20764, node 13 2025-05-03T08:43:57.778472Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.778486Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.778488Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.778534Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:57.855231Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.855268Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.856613Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.858385Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:58.079379Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbOlapStore::ManyTables >> TGRpcYdbTest::ExecuteQueryImplicitSession >> TGRpcNewClient::TestAuth >> TGRpcClientLowTest::SimpleRequest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> YdbYqlClient::TestYqlIssues >> YdbQueryService::TestCreateAndAttachSession |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::DatetimeKey >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::YqlQueryWithParams >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogExistingUserId >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService >> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestAttachTwice >> YdbYqlClient::TestConstraintViolation [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-05-03T08:43:56.903067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137479441840949:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.903259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f2/r3tmp/tmpsTqSRX/pdisk_1.dat 2025-05-03T08:43:56.969324Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 23780, node 1 2025-05-03T08:43:56.992317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.992330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.992332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.992370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:56.999818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.999844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.003498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9280 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.035833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.202190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483736809036:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.202216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.240499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.305698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483736809200:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.305720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.305725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483736809205:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.306428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:57.309571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137483736809207:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:57.389856Z node 1 :TX_PROXY ERROR: Actor# [1:7500137483736809276:2766] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:57.402789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan21fsa1mfv4f112m156wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg1NDFlZGEtZDIzNWM0NjktYzNkYWY3MzUtOTdhYmExYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:57.422749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan21jzemctm6m97k43edby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg1NDFlZGEtZDIzNWM0NjktYzNkYWY3MzUtOTdhYmExYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:57.940668Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137480664838992:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.940797Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f2/r3tmp/tmpovoPnN/pdisk_1.dat 2025-05-03T08:43:57.954844Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65227, node 4 2025-05-03T08:43:57.964769Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.964792Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.964793Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.964842Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:58.041253Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:58.041283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:58.042690Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:58.044397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:58.237379Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137484959807227:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.237401Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.237415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137484959807238:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.238089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:58.241770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137484959807241:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:58.319916Z node 4 :TX_PROXY ERROR: Actor# [4:7500137484959807316:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:58.980561Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137485498116792:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.980589Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f2/r3tmp/tmpEzwxR5/pdisk_1.dat 2025-05-03T08:43:58.995825Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14913, node 7 2025-05-03T08:43:59.010392Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.010404Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.010406Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.010436Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TC ... g previous query completion proxyRequestId: 8 2025-05-03T08:43:59.333599Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzQ0YzlkZmUtODc1YzFmNWUtYTIxZTQ3ZDctNjc1NTg1ODU=, ActorId: [7:7500137489793085011:2332], ActorState: ExecuteState, TraceId: 01jtan23f329zp56qtfd9h2axg, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-05-03T08:43:59.333604Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzQ0YzlkZmUtODc1YzFmNWUtYTIxZTQ3ZDctNjc1NTg1ODU=, ActorId: [7:7500137489793085011:2332], ActorState: ExecuteState, TraceId: 01jtan23f329zp56qtfd9h2axg, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-05-03T08:43:59.333708Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137489793085091:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.333729Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.333774Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzQ0YzlkZmUtODc1YzFmNWUtYTIxZTQ3ZDctNjc1NTg1ODU=, ActorId: [7:7500137489793085011:2332], ActorState: ExecuteState, TraceId: 01jtan23f329zp56qtfd9h2axg, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-05-03T08:43:59.333783Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137489793085096:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.333788Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzQ0YzlkZmUtODc1YzFmNWUtYTIxZTQ3ZDctNjc1NTg1ODU=, ActorId: [7:7500137489793085011:2332], ActorState: ExecuteState, TraceId: 01jtan23f329zp56qtfd9h2axg, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-05-03T08:43:59.333799Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.336382Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137489793085047:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:59.398628Z node 7 :TX_PROXY ERROR: Actor# [7:7500137489793085142:2667] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:00.056263Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137496482986306:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f2/r3tmp/tmpKuSwm9/pdisk_1.dat 2025-05-03T08:44:00.057097Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:00.069706Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12559, node 10 2025-05-03T08:44:00.086021Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.086034Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.086036Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.086082Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.155945Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.155974Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.157521Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.159382Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.357711Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137496482987096:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.357733Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.360684Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:00.420328Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137496482987258:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.420356Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.420358Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137496482987263:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.420866Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.429338Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137496482987265:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:00.502845Z node 10 :TX_PROXY ERROR: Actor# [10:7500137496482987338:2767] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:00.510270Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan24h49zy61pesx83mpr2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjE0MmUzMjUtZTJhZTVkMTMtZGVhMGE1MWUtZTFhNGJkODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:00.519912Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan24m1bevts08aqkqghjwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjE0MmUzMjUtZTJhZTVkMTMtZGVhMGE1MWUtZTFhNGJkODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:00.520572Z node 10 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-05-03T08:44:00.522069Z node 10 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-05-03T08:44:00.522118Z node 10 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-05-03T08:44:00.522175Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7500137496482987396:2332], Table: `Root/Test` ([72057594046644480:2:1]), SessionActorId: [10:7500137496482987078:2332]Got CONSTRAINT VIOLATION for table `Root/Test`. ShardID=72075186224037888, Sink=[10:7500137496482987396:2332].{
: Error: Conflict with existing key., code: 2012 } 2025-05-03T08:44:00.522268Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7500137496482987389:2332], SessionActorId: [10:7500137496482987078:2332], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[10:7500137496482987078:2332]. isRollback=0 2025-05-03T08:44:00.522318Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZjE0MmUzMjUtZTJhZTVkMTMtZGVhMGE1MWUtZTFhNGJkODM=, ActorId: [10:7500137496482987078:2332], ActorState: ExecuteState, TraceId: 01jtan24m1bevts08aqkqghjwm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [10:7500137496482987390:2332] from: [10:7500137496482987389:2332] 2025-05-03T08:44:00.522336Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7500137496482987390:2332] TxId: 281474976715662. Ctx: { TraceId: 01jtan24m1bevts08aqkqghjwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjE0MmUzMjUtZTJhZTVkMTMtZGVhMGE1MWUtZTFhNGJkODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-05-03T08:44:00.522379Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZjE0MmUzMjUtZTJhZTVkMTMtZGVhMGE1MWUtZTFhNGJkODM=, ActorId: [10:7500137496482987078:2332], ActorState: ExecuteState, TraceId: 01jtan24m1bevts08aqkqghjwm, Create QueryResponse for error on request, msg: >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TYqlDateTimeTests::TimestampKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> YdbQueryService::TestAttachTwice [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] >> YdbOlapStore::LogPagingAfter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-05-03T08:43:42.433412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137419388183148:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.433490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:42.436616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500137415944143485:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.436808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00019a/r3tmp/tmpHtetlw/pdisk_1.dat 2025-05-03T08:43:42.463248Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-05-03T08:43:42.469461Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-05-03T08:43:42.492594Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61115, node 1 2025-05-03T08:43:42.510575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/bb6b/00019a/r3tmp/yandexW3qUbV.tmp 2025-05-03T08:43:42.510585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/bb6b/00019a/r3tmp/yandexW3qUbV.tmp 2025-05-03T08:43:42.510641Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/bb6b/00019a/r3tmp/yandexW3qUbV.tmp 2025-05-03T08:43:42.510688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:42.515635Z INFO: TTestServer started on Port 5102 GrpcPort 61115 TClient is connected to server localhost:5102 PQClient connected to localhost:61115 === TenantModeEnabled() = 0 === Init PQ - start server on port 61115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:42.534178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.534205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.536083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:42.564683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.564721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.566156Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-03T08:43:42.566397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:42.576613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-03T08:43:42.576684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.576749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-03T08:43:42.576799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:43:42.576813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.577538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-03T08:43:42.577559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-03T08:43:42.577607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.577618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-03T08:43:42.577620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-05-03T08:43:42.577624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-05-03T08:43:42.577969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.577982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-03T08:43:42.577996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-05-03T08:43:42.578098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:43:42.578107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-05-03T08:43:42.578110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:43:42.578410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.578419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.578422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-03T08:43:42.578433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-05-03T08:43:42.579178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-03T08:43:42.579570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-05-03T08:43:42.579619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-05-03T08:43:42.580109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261822627, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:43:42.580143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746261822627 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-03T08:43:42.580155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-03T08:43:42.580212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-05-03T08:43:42.580226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-05-03T08:43:42.580254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:43:42.580262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-03T08:43:42.580635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-03T08:43:42.580644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-03T08:43:42.580684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-03T08:43:42.580692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137419388183735:2374], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-05-03T08:43:42.580699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:42.580703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-05-03T08:43:42.580715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-05-03T08:43:42.580722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-05-03T08:43:42.580726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-05-03T08:43:42.580727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-05-03T08:43:42.580730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-05-03T08:43:42.580733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-05-03T08:43:42.580736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-05-03T08:43:42.580738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-05-03T08:43:42.580745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathI ... _READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_6532247407015118294_v1" (Sender=[5:7500137499213718479:2612], Pipe=[5:7500137499213718482:2612], Partitions=[], ActiveFamilyCount=0) 2025-05-03T08:44:01.504942Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_6532247407015118294_v1" sender [5:7500137499213718479:2612] lock partition 0 for ReadingSession "shared/cli_5_1_6532247407015118294_v1" (Sender=[5:7500137499213718479:2612], Pipe=[5:7500137499213718482:2612], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-05-03T08:44:01.504960Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-05-03T08:44:01.504969Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000045s 2025-05-03T08:44:01.505215Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_6532247407015118294_v1" ClientId: "cli" PipeClient { RawX1: 7500137499213718482 RawX2: 4503621102209588 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-05-03T08:44:01.505243Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-05-03T08:44:01.505299Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7500137499213718484:2615] 2025-05-03T08:44:01.505334Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_6532247407015118294_v1:1 with generation 1 2025-05-03T08:44:01.506640Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1746261841503 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-05-03T08:44:01.506656Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-05-03T08:44:01.506675Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 sending to client partition status 2025-05-03T08:44:01.506937Z :INFO: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-05-03T08:44:01.507102Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-05-03T08:44:01.507149Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-05-03T08:44:01.507170Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-05-03T08:44:01.507172Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-05-03T08:44:01.507186Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-05-03T08:44:01.507188Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1TEvPartitionReady. Aval parts: 1 2025-05-03T08:44:01.507194Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 performing read request: guid# 14703047-740d09b1-646bb834-d10879d3, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-05-03T08:44:01.507219Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 14703047-740d09b1-646bb834-d10879d3 2025-05-03T08:44:01.507473Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1746261841402 CreateTimestampMS: 1746261841402 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1746261841403 CreateTimestampMS: 1746261841402 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1746261841403 CreateTimestampMS: 1746261841402 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-05-03T08:44:01.507508Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-05-03T08:44:01.507521Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 14703047-740d09b1-646bb834-d10879d3 has messages 1 2025-05-03T08:44:01.507558Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 read done: guid# 14703047-740d09b1-646bb834-d10879d3, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2025-05-03T08:44:01.507575Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 response to read: guid# 14703047-740d09b1-646bb834-d10879d3 2025-05-03T08:44:01.507671Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 Process answer. Aval parts: 0 2025-05-03T08:44:01.507765Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.507794Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.507871Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-05-03T08:44:01.507887Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] Returning serverBytesSize = 371 to budget 2025-05-03T08:44:01.507893Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.508002Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-03T08:44:01.508046Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-05-03T08:44:01.508055Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-05-03T08:44:01.508060Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-05-03T08:44:01.508069Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-05-03T08:44:01.508076Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] Returning serverBytesSize = 0 to budget 2025-05-03T08:44:01.508100Z :INFO: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] Closing read session. 
Close timeout: 0.000000s 2025-05-03T08:44:01.508107Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-05-03T08:44:01.508113Z :INFO: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-03T08:44:01.508131Z :NOTICE: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-05-03T08:44:01.508136Z :DEBUG: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] [] Abort session to cluster 2025-05-03T08:44:01.508128Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2025-05-03T08:44:01.508174Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 got read request: guid# 3adfc53e-c49d5c4d-bc797918-6fef6ad7 2025-05-03T08:44:01.508209Z :NOTICE: [] [] [207e2ffa-c96660c1-9f5ea803-ac102f86] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-05-03T08:44:01.508471Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 grpc read done: success# 0, data# { } 2025-05-03T08:44:01.508481Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 grpc read failed 2025-05-03T08:44:01.508484Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 closed 2025-05-03T08:44:01.508597Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6532247407015118294_v1 is DEAD 2025-05-03T08:44:01.508678Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_6532247407015118294_v1 2025-05-03T08:44:01.508743Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7500137499213718482:2612] disconnected; active server actors: 1 2025-05-03T08:44:01.508752Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7500137499213718482:2612] client cli disconnected session shared/cli_5_1_6532247407015118294_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-05-03T08:43:56.589221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137478633380497:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.589238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f6/r3tmp/tmpGhmSQx/pdisk_1.dat 2025-05-03T08:43:56.641921Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11119, node 1 2025-05-03T08:43:56.659060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.659071Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.659072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.659113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:56.682075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:56.689778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.689808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.691389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:56.865197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137478633381457:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.865236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137478633381446:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.865254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.865972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:56.870351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137478633381460:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-03T08:43:56.923536Z node 1 :TX_PROXY ERROR: Actor# [1:7500137478633381536:2662] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.982066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZGE0ZWUwOTItNTFkMDQzN2YtMzZmMTdkNTQtNDAwNWNhYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-05-03T08:43:57.649142Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137482366168505:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.649160Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f6/r3tmp/tmp5ymw1Z/pdisk_1.dat 2025-05-03T08:43:57.666239Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11454, node 4 2025-05-03T08:43:57.686823Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.686837Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.686838Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.686882Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.749813Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.749851Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.751067Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.752309Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:57.944000Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137482366169464:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.944017Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137482366169453:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.944035Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.944771Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:43:57.948442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137482366169467:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:43:58.002219Z node 4 :TX_PROXY ERROR: Actor# [4:7500137486661136834:2657] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:58.669671Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137486697965031:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.669690Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f6/r3tmp/tmpF0Jkav/pdisk_1.dat 2025-05-03T08:43:58.682148Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13421, node 7 2025-05-03T08:43:58.702637Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:58.702650Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:58.702652Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:58.702701Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2 ... . Database not set, use /Root 2025-05-03T08:43:59.300347Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jtan23e43zaqjqs5m506nmnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGE1MDY2NTEtYzE5YWExOTEtY2QwYjgzMy1lMGJjYTg5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:59.300747Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=24; 2025-05-03T08:43:59.300855Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7500137490992933834:2358], Table: `Root/names` ([72057594046644480:2:1]), SessionActorId: [7:7500137490992933545:2358]Got LOCKS BROKEN for table `Root/names`. ShardID=72075186224037888, Sink=[7:7500137490992933834:2358].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-05-03T08:43:59.300878Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7500137490992933795:2358], SessionActorId: [7:7500137490992933545:2358], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7500137490992933545:2358]. isRollback=0 2025-05-03T08:43:59.300932Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OGE1MDY2NTEtYzE5YWExOTEtY2QwYjgzMy1lMGJjYTg5Mw==, ActorId: [7:7500137490992933545:2358], ActorState: ExecuteState, TraceId: 01jtan23e43zaqjqs5m506nmnk, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7500137490992933828:2358] from: [7:7500137490992933795:2358] 2025-05-03T08:43:59.300955Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7500137490992933828:2358] TxId: 281474976715678. Ctx: { TraceId: 01jtan23e43zaqjqs5m506nmnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGE1MDY2NTEtYzE5YWExOTEtY2QwYjgzMy1lMGJjYTg5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-05-03T08:43:59.300991Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OGE1MDY2NTEtYzE5YWExOTEtY2QwYjgzMy1lMGJjYTg5Mw==, ActorId: [7:7500137490992933545:2358], ActorState: ExecuteState, TraceId: 01jtan23e43zaqjqs5m506nmnk, Create QueryResponse for error on request, msg: 2025-05-03T08:43:59.941336Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137491179803442:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f6/r3tmp/tmp4zDWnC/pdisk_1.dat 2025-05-03T08:43:59.943242Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:59.952071Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6197, node 10 2025-05-03T08:43:59.972962Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.972974Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.972976Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.973041Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.040302Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.040333Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.041858Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.043070Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.262628Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137495474771524:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.262648Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137495474771513:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.262697Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.263235Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.266767Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137495474771527:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:00.350046Z node 10 :TX_PROXY ERROR: Actor# [10:7500137495474771602:2646] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:01.002877Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137499202532866:2077];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.002961Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f6/r3tmp/tmpGqXzJz/pdisk_1.dat 2025-05-03T08:44:01.016804Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64106, node 13 2025-05-03T08:44:01.046014Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.046041Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.046043Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.046111Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.103261Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.103291Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.104657Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.105844Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.339944Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137499202533797:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.339964Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137499202533808:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.339970Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.340584Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:01.344278Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137499202533811:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:01.443237Z node 13 :TX_PROXY ERROR: Actor# [13:7500137499202533888:2654] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-05-03T08:43:40.740364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137408935002028:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:40.740498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:40.743920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500137411053200081:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:40.743952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0001b4/r3tmp/tmpEpi247/pdisk_1.dat 2025-05-03T08:43:40.787516Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-05-03T08:43:40.792517Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-05-03T08:43:40.807938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13397, node 1 2025-05-03T08:43:40.831591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/bb6b/0001b4/r3tmp/yandexYWqMoW.tmp 2025-05-03T08:43:40.831602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/bb6b/0001b4/r3tmp/yandexYWqMoW.tmp 2025-05-03T08:43:40.831655Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/bb6b/0001b4/r3tmp/yandexYWqMoW.tmp 2025-05-03T08:43:40.831670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:40.834936Z INFO: TTestServer started on Port 19510 GrpcPort 13397 TClient is connected to server localhost:19510 PQClient connected to localhost:13397 === TenantModeEnabled() = 0 === Init PQ - start server on port 13397 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:40.879126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:40.879151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:40.879407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:40.879430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:40.885570Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-05-03T08:43:40.885624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:40.885945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:40.891711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-03T08:43:40.891783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.891858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-03T08:43:40.891905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:43:40.891920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.892708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-05-03T08:43:40.892736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-05-03T08:43:40.892807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.892820Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-05-03T08:43:40.892828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-05-03T08:43:40.892831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-05-03T08:43:40.893306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-05-03T08:43:40.893505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-05-03T08:43:40.893519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.893787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-03T08:43:40.893799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-05-03T08:43:40.894482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-03T08:43:40.894919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-05-03T08:43:40.894972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-05-03T08:43:40.895614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261820940, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:43:40.895669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746261820940 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-03T08:43:40.895681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-03T08:43:40.895751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-05-03T08:43:40.895765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-05-03T08:43:40.895802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:43:40.895816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-05-03T08:43:40.896242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-03T08:43:40.896253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-03T08:43:40.896301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-03T08:43:40.896309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137408935002484:2382], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-05-03T08:43:40.896317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:40.896321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-05-03T08:43:40.896334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-05-03T08:43:40.896342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-05-03T08:43:40.896346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-05-03T08:43:40.896347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-05-03T08:43:40.896351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-05-03T08:43:40.896354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-05-03T08:43:40.896357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-05-03T08:43:40.896359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-05-03T08:43:40.896368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pat ... Active partitions [0] session "shared/cli_5_1_5101852589026736340_v1" sender [5:7500137500840018752:2568] lock partition 0 for ReadingSession "shared/cli_5_1_5101852589026736340_v1" (Sender=[5:7500137500840018752:2568], Pipe=[5:7500137500840018755:2568], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-05-03T08:44:01.660340Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-05-03T08:44:01.660344Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-05-03T08:44:01.660350Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000038s 2025-05-03T08:44:01.660593Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_5101852589026736340_v1" ClientId: "cli" PipeClient { RawX1: 7500137500840018755 RawX2: 4503621102209544 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-05-03T08:44:01.660633Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-05-03T08:44:01.660654Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 got read request: guid# 81b8cabd-88d0954d-7f5b8b11-75609506 2025-05-03T08:44:01.660905Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7500137500840018758:2571] 2025-05-03T08:44:01.660989Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_5101852589026736340_v1:1 with generation 1 2025-05-03T08:44:01.662633Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1746261841658 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-05-03T08:44:01.662649Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-05-03T08:44:01.662671Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 sending to client partition status 2025-05-03T08:44:01.662937Z :INFO: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-05-03T08:44:01.663113Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-05-03T08:44:01.663155Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-05-03T08:44:01.663167Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-05-03T08:44:01.663177Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-05-03T08:44:01.663203Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-05-03T08:44:01.663211Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1TEvPartitionReady. Aval parts: 1 2025-05-03T08:44:01.663219Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 performing read request: guid# 6eef2d72-3acfb83f-3bade97b-8ff30174, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-05-03T08:44:01.663241Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 6eef2d72-3acfb83f-3bade97b-8ff30174 2025-05-03T08:44:01.663645Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1746261841558 CreateTimestampMS: 1746261841557 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1746261841559 CreateTimestampMS: 1746261841557 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1746261841559 CreateTimestampMS: 1746261841557 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-05-03T08:44:01.663685Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-05-03T08:44:01.663691Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 6eef2d72-3acfb83f-3bade97b-8ff30174 has messages 1 2025-05-03T08:44:01.663747Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 read done: guid# 6eef2d72-3acfb83f-3bade97b-8ff30174, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2025-05-03T08:44:01.663756Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 response to read: guid# 6eef2d72-3acfb83f-3bade97b-8ff30174 2025-05-03T08:44:01.663838Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 Process answer. Aval parts: 0 2025-05-03T08:44:01.663930Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.663978Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.664050Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-05-03T08:44:01.664065Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] Returning serverBytesSize = 371 to budget 2025-05-03T08:44:01.664081Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2025-05-03T08:44:01.664171Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-03T08:44:01.664214Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-05-03T08:44:01.664237Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-05-03T08:44:01.664242Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-05-03T08:44:01.664253Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-05-03T08:44:01.664261Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] Returning serverBytesSize = 0 to budget 2025-05-03T08:44:01.664289Z :INFO: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] Closing read session. 
Close timeout: 0.000000s 2025-05-03T08:44:01.664269Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2025-05-03T08:44:01.664297Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-05-03T08:44:01.664309Z :INFO: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-03T08:44:01.664327Z :NOTICE: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-05-03T08:44:01.664319Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 got read request: guid# 98e3d841-14ffd062-978b3923-2d3b34d8 2025-05-03T08:44:01.664332Z :DEBUG: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] [] Abort session to cluster 2025-05-03T08:44:01.664464Z :NOTICE: [] [] [1b2a7653-45265b4a-79e7aabd-a35643a2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-05-03T08:44:01.664567Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc read done: success# 0, data# { } 2025-05-03T08:44:01.664574Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc read failed 2025-05-03T08:44:01.664578Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 grpc closed 2025-05-03T08:44:01.664594Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5101852589026736340_v1 is DEAD 2025-05-03T08:44:01.664749Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_5101852589026736340_v1 2025-05-03T08:44:01.664771Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7500137500840018755:2568] disconnected; active server actors: 1 2025-05-03T08:44:01.664784Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7500137500840018755:2568] client cli disconnected session shared/cli_5_1_5101852589026736340_v1 >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TTableProfileTests::OverwriteCachingPolicy >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::CheckUnauthorized >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession >> TGRpcYdbTest::ExecuteQueryBadRequest >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> YdbLogStore::LogTable [GOOD] >> YdbLogStore::AlterLogTable >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcNewClient::SimpleYqlQuery |96.2%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> YdbYqlClient::DiscoveryLocationOverride ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-05-03T08:43:47.078085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137438508597213:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:47.078112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00012e/r3tmp/tmpJit23P/pdisk_1.dat 2025-05-03T08:43:47.146562Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30402, node 1 2025-05-03T08:43:47.168154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:47.168165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:47.168167Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:47.168207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:47.178771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:47.178801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:47.181620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:47.223430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:26587 2025-05-03T08:43:47.248261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:47.264664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:47.264722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:47.264767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:47.264791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:47.264808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:47.264825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:47.264846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:47.264867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:47.264892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:47.264920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:47.264941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:47.264966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137438508598235:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:47.268233Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:47.268247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:47.268286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:47.268302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:47.268320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:47.268334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:47.268347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:47.268362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:47.268379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:47.268397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:47.268413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:47.268427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7500137438508598242:2326];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:47.271574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:47.271596Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:47.271630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:47.271646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:47.271660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:47.271678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:47.271691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:47.271706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:47.271722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137438508598233:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=S ... node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137497516080467:3143], CA [28:7500137497516080474:3148], CA [28:7500137497516080481:3155], CA [28:7500137497516080475:3149], 2025-05-03T08:44:01.546125Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137497516080474:3148], task: 59, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 205 Tasks { TaskId: 59 CpuTimeUs: 93 FinishTimeMs: 1746261841545 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 23 BuildCpuTimeUs: 70 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261841533 UpdateTimeMs: 1746261841545 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:01.546128Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. 
Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137497516080474:3148] 2025-05-03T08:44:01.546131Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137497516080467:3143], CA [28:7500137497516080481:3155], CA [28:7500137497516080475:3149], 2025-05-03T08:44:01.546148Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137497516080467:3143], task: 54, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 164 Tasks { TaskId: 54 CpuTimeUs: 89 FinishTimeMs: 1746261841545 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 13 BuildCpuTimeUs: 76 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261841533 UpdateTimeMs: 1746261841545 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:01.546151Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137497516080467:3143] 2025-05-03T08:44:01.546156Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137497516080481:3155], CA [28:7500137497516080475:3149], 2025-05-03T08:44:01.546164Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Forwarded TEvStreamData to [28:7500137497516080376:3083] 2025-05-03T08:44:01.546191Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [28:7500137497516080475:3149], task: 60, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 170 Tasks { TaskId: 60 CpuTimeUs: 84 FinishTimeMs: 1746261841545 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 19 BuildCpuTimeUs: 65 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261841534 UpdateTimeMs: 1746261841545 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:01.546195Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137497516080475:3149] 2025-05-03T08:44:01.546200Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137497516080481:3155], 2025-05-03T08:44:01.546324Z node 28 :KQP_EXECUTER DEBUG: TxId: 281474976710670, send ack to channelId: 66, seqNo: 1, enough: 0, freeSpace: 8388469, to: [28:7500137497516080536:3155] 2025-05-03T08:44:01.546334Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137497516080481:3155], TxId: 281474976710670, task: 66. Ctx: { SessionId : ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=. TraceId : 01jtan25hp0mq8enkxfqyk8gee. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-05-03T08:44:01.546345Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 66. Tasks execution finished, don't wait for ack delivery in input channelId: 65, seqNo: [1] 2025-05-03T08:44:01.546347Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 66. Tasks execution finished 2025-05-03T08:44:01.546349Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137497516080481:3155], TxId: 281474976710670, task: 66. Ctx: { SessionId : ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=. TraceId : 01jtan25hp0mq8enkxfqyk8gee. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:01.546367Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 66. pass away 2025-05-03T08:44:01.546380Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:01.546398Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137497516080481:3155], task: 66, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 143 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 34 FinishTimeMs: 1746261841546 ComputeCpuTimeUs: 14 BuildCpuTimeUs: 20 HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261841535 UpdateTimeMs: 1746261841546 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:01.546407Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137497516080481:3155] 2025-05-03T08:44:01.546438Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-05-03T08:44:01.546447Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137497516080399:3084] TxId: 281474976710670. Ctx: { TraceId: 01jtan25hp0mq8enkxfqyk8gee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.013013s ReadRows: 0 ReadBytes: 0 ru: 8 rate limiter was not found force flag: 1 2025-05-03T08:44:01.546461Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-05-03T08:44:01.546546Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 21.512 QueriesCount: 1 2025-05-03T08:44:01.546561Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-05-03T08:44:01.546582Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-03T08:44:01.546584Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, EndCleanup, isFinal: 1 2025-05-03T08:44:01.546595Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: ExecuteState, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7500137493221109818:2279] 2025-05-03T08:44:01.546597Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: unknown state, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Cleanup temp tables: 0 2025-05-03T08:44:01.547152Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261841506, txId: 18446744073709551615] shutting down 2025-05-03T08:44:01.547184Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=YjFhZjUxZTMtNWNhYjNjNTQtN2U0YWMxNzEtNzFmODMxYTY=, ActorId: [28:7500137497516080378:3084], ActorState: unknown state, TraceId: 01jtan25hp0mq8enkxfqyk8gee, Session actor destroyed >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> TGRpcYdbTest::ExplainQuery >> YdbOlapStore::LogExistingUserId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-05-03T08:43:56.929997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137478111355909:2075];send_to=[0:7307199536658146131:7762515]; 
2025-05-03T08:43:56.930045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f0/r3tmp/tmpwUtrXK/pdisk_1.dat 2025-05-03T08:43:56.980864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29132, node 1 2025-05-03T08:43:57.007072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.007082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.007084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.007119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:57.033205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.033231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.034966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.035523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:43:57.803977Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137483282475194:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.804013Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f0/r3tmp/tmpY9SE50/pdisk_1.dat 2025-05-03T08:43:57.818444Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13890, node 4 2025-05-03T08:43:57.831972Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.831983Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.831985Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.832027Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.904017Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.904054Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.905757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.907757Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:58.612121Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137485744127441:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.612144Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f0/r3tmp/tmpYDyHqd/pdisk_1.dat 2025-05-03T08:43:58.626501Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30106, node 7 2025-05-03T08:43:58.638777Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:58.638790Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:58.638792Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:58.638828Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:58.712450Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:58.712482Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:58.714067Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:58.715588Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:5663 2025-05-03T08:43:58.745639Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:58.749378Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:59.250374Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500137491223124273:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.250453Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:59.252682Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.252705Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:59.254004Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-05-03T08:43:59.254241Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.260161Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:59.264387Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:59.766158Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500137491268346189:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.767746Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.767772Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:59.769623Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-05-03T08:43:59.769859Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.771587Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:59.778537Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-03T08:43:59.854561Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard ... 
Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:00.332073Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-05-03T08:44:00.332138Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:00.772326Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:00.934229Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137494329484824:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.934299Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f0/r3tmp/tmpmgcj0c/pdisk_1.dat 2025-05-03T08:44:00.948213Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27445, node 10 2025-05-03T08:44:00.963300Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.963313Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.963315Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.963348Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24731 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-03T08:44:01.034913Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.034945Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.045292Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.046660Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:24731 2025-05-03T08:44:01.080553Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.084909Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.586125Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7500137497711673790:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.586163Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:01.588116Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.588139Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.589843Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-05-03T08:44:01.590036Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.597985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.602860Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.104233Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7500137503776668792:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.104279Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:02.107923Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.107948Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.109269Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-05-03T08:44:02.109613Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.117336Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.135107Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.477499Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137502919421294:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.477499Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137502919421304:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.477519Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.478183Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-05-03T08:44:02.481687Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137502919421308:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-05-03T08:44:02.575540Z node 10 :TX_PROXY ERROR: Actor# [10:7500137502919421380:3349] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:02.584866Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jtan26hda41me4039hn85rg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.596278Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jtan26mwbrwh9fmpqwk4g0m5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.612353Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jtan26n75mgs4ywsvsw2wp3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.614076Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jtan26n75mgs4ywsvsw2wp3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.614621Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7500137502919421479:2357] TxId: 281474976715669. Ctx: { TraceId: 01jtan26n75mgs4ywsvsw2wp3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. Tx canceled, actorId: [10:7500137502919421479:2357], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-05-03T08:44:02.614676Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, ActorId: [10:7500137502919421118:2357], ActorState: ExecuteState, TraceId: 01jtan26n75mgs4ywsvsw2wp3r, Create QueryResponse for error on request, msg: 2025-05-03T08:44:02.614806Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jtan26n75mgs4ywsvsw2wp3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODExNzMyNTktY2I1NGJkNDMtMmU0MmExMWMtNTBlNTRj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-05-03T08:44:02.616313Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-05-03T08:44:02.616420Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:02.616442Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:44:02.616478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:03.105269Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> YdbLogStore::AlterLogTable [FAIL] >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> YdbS3Internal::TestS3Listing >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::DeleteTableWithDeletedIndex >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> YdbYqlClient::RetryOperationAsync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-05-03T08:43:59.920941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137492700216617:2276];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.920994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d9/r3tmp/tmpqsSZPd/pdisk_1.dat 2025-05-03T08:43:59.983665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6645, node 1 2025-05-03T08:44:00.019611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.019638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.020432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.020440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.020442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.020499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:00.021018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.062747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.777742Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137497211797641:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.778018Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d9/r3tmp/tmp0yIvt9/pdisk_1.dat 2025-05-03T08:44:00.795638Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24661, node 4 2025-05-03T08:44:00.820416Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.820429Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.820431Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.820478Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:00.878182Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.878211Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.879725Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.880284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.628781Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137501001042760:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.628801Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d9/r3tmp/tmpKGxs5z/pdisk_1.dat 2025-05-03T08:44:01.644762Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11843, node 7 2025-05-03T08:44:01.665617Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.665632Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.665634Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.665681Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.729109Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.729139Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.730545Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.734193Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.678401Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137502407311161:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.678421Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d9/r3tmp/tmpri9WeE/pdisk_1.dat 2025-05-03T08:44:02.695628Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27706, node 10 2025-05-03T08:44:02.711502Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.711512Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.711513Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.711541Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.782562Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.782592Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.783522Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.783634Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:44:03.337669Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137507333284695:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.337703Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d9/r3tmp/tmp6KbgLc/pdisk_1.dat 2025-05-03T08:44:03.352017Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24556, node 13 2025-05-03T08:44:03.374680Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.374696Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.374698Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.374747Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.437659Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.437702Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.439105Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.441935Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbScripting::MultiResults ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-05-03T08:44:00.204494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137495742937400:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.204515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d5/r3tmp/tmpjLDs6V/pdisk_1.dat 2025-05-03T08:44:00.254518Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32227, node 1 2025-05-03T08:44:00.284826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.284838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.284858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.284899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.304954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.304981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.305800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:00.306457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:44:00.477138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137495742938347:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.477161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.502372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:00.567761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137495742938511:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.567790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.567801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137495742938516:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.568553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.572277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137495742938518:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-03T08:44:00.653454Z node 1 :TX_PROXY ERROR: Actor# [1:7500137495742938595:2779] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:00.661537Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500137495742938613:2352], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-05-03T08:44:00.661846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZkMjI5NzYtNWZhYWYxN2UtZGM5MDcxYzYtYTRjOWU3, ActorId: [1:7500137495742938328:2332], ActorState: ExecuteState, TraceId: 01jtan24nqc86w724dndpnr989, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-03T08:44:01.216778Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137497502523873:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d5/r3tmp/tmpj6vVix/pdisk_1.dat 2025-05-03T08:44:01.218371Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:01.246632Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21970, node 4 2025-05-03T08:44:01.264921Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.264938Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.264939Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.264970Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.280026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.319775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.319800Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.321324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.544610Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137497502524667:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.544632Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.058425Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137504485785366:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.058605Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d5/r3tmp/tmpjG0Y75/pdisk_1.dat 2025-05-03T08:44:02.082370Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10302, node 7 2025-05-03T08:44:02.102050Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.102066Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.102068Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.102091Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingPar ... tileState: Unknown -> Disconnected 2025-05-03T08:44:02.158513Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.159967Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.162076Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:02.389590Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137504485786301:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.389644Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.390974Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.453761Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137504485786467:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.453784Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.453817Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137504485786472:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.454486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:02.459311Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137504485786474:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:02.513723Z node 7 :TX_PROXY ERROR: Actor# [7:7500137504485786549:2772] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:02.521494Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan26gn8thc9mzzk87cwk3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NjcyMzVjZjUtZTBlZjZmMjEtNzcxMzZmNjYtOWY4NTQ1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.530352Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7500137504485786593:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-03T08:44:02.530411Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NjcyMzVjZjUtZTBlZjZmMjEtNzcxMzZmNjYtOWY4NTQ1Mw==, ActorId: [7:7500137504485786283:2332], ActorState: ExecuteState, TraceId: 01jtan26jx05r81v8rrvbjbwx6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-03T08:44:02.537707Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan26k32yc850yveeyg5t4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NjcyMzVjZjUtZTBlZjZmMjEtNzcxMzZmNjYtOWY4NTQ1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:02.554246Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan26kc3sqbyhnyk79nf1z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NjcyMzVjZjUtZTBlZjZmMjEtNzcxMzZmNjYtOWY4NTQ1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:03.135017Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137509942841771:2148];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.135744Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d5/r3tmp/tmpoD27fH/pdisk_1.dat 2025-05-03T08:44:03.149936Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8362, node 10 2025-05-03T08:44:03.163254Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.163265Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.163266Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.163292Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:44:03.237303Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.237345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.265271Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.266786Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:03.433026Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137509942842628:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.433071Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.434330Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.493998Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137509942842792:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.494023Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.494031Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137509942842797:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.494589Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:03.502579Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137509942842799:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:03.553669Z node 10 :TX_PROXY ERROR: Actor# [10:7500137509942842870:2762] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:03.556169Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7500137509942842888:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-03T08:44:03.556248Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NzE2YWNiM2QtM2JhNDBiNTktYTYzNTMzNDItN2MzOGEwY2M=, ActorId: [10:7500137509942842610:2332], ActorState: ExecuteState, TraceId: 01jtan27h52pzg00f93exd19x9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-03T08:44:03.559505Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7500137509942842906:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-03T08:44:03.559571Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NzE2YWNiM2QtM2JhNDBiNTktYTYzNTMzNDItN2MzOGEwY2M=, ActorId: [10:7500137509942842610:2332], ActorState: ExecuteState, TraceId: 01jtan27k5ehrddgztfstdv2b4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert >> TGRpcYdbTest::ExplainQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2025-05-03T08:43:59.731944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137490922538694:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.731967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e1/r3tmp/tmpJ537TV/pdisk_1.dat 2025-05-03T08:43:59.788519Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6997, node 1 2025-05-03T08:43:59.809073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.809084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.809086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.809120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:59.832455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.832494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:59.837488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.865058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:00.587039Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137494886372283:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.587108Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e1/r3tmp/tmpEr3yOZ/pdisk_1.dat 2025-05-03T08:44:00.603111Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1964, node 4 2025-05-03T08:44:00.628454Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.628463Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.628464Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.628493Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.687816Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.687852Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.688978Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.689802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.883019Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137494886373234:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.883046Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137494886373226:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.883062Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.883713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.887309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137494886373240:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:00.948443Z node 4 :TX_PROXY ERROR: Actor# [4:7500137494886373319:2657] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:01.620669Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137499082000342:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.620691Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e1/r3tmp/tmpJk0rwb/pdisk_1.dat 2025-05-03T08:44:01.643357Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8750, node 7 2025-05-03T08:44:01.658011Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.658026Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.658028Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.658075Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.720661Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.720692Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.722138Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.726410Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.918519Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137499082001274:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.918538Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.921457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:01.985420Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137499082001444:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.985461Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.985580Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137499082001449:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.986316Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:01.990899Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137499082001451:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:02.048463Z node 7 :TX_PROXY ERROR: Actor# [7:7500137503376968816:2763] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:02.664340Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137503451708314:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.664367Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e1/r3tmp/tmp0O9nlI/pdisk_1.dat 2025-05-03T08:44:02.681743Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6993, node 10 2025-05-03T08:44:02.705240Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.705257Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.705259Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.705299Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.762652Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.762690Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.763419Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.764204Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:44:02.788599Z node 10 :TX_PROXY ERROR: Actor# [10:7500137503451709198:2578] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-05-03T08:44:02.788642Z node 10 :TX_PROXY ERROR: Actor# [10:7500137503451709198:2578] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-05-03T08:44:03.524041Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137509304573021:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.524064Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e1/r3tmp/tmpPT5JhE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3620, node 13 2025-05-03T08:44:03.550503Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.550515Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.550517Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.550555Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:03.550868Z node 13 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.624314Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.624349Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.625813Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.627497Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:03.639677Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-05-03T08:43:49.289324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137449933363565:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:49.289338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000122/r3tmp/tmpzwxBWR/pdisk_1.dat 2025-05-03T08:43:49.402806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:49.402833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:49.404399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13234, node 1 TClient is connected to server localhost:6173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:49.480757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:49.492602Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:49.492962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:49.492973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:49.492974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:49.493009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6173 2025-05-03T08:43:49.506086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:49.525646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.525724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.525783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.525813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:49.525839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:49.525863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:49.525886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:49.525933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:49.525964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:49.525983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:49.526004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:49.526030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7500137449933364587:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:49.529973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.530003Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.530056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.530083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:49.530108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:49.530132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:49.530159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:49.530182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:49.530201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-05-03T08:43:49.530225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-05-03T08:43:49.530250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-05-03T08:43:49.530274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7500137449933364592:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-05-03T08:43:49.534161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-05-03T08:43:49.534190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-05-03T08:43:49.534235Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-05-03T08:43:49.534254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-05-03T08:43:49.534280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-05-03T08:43:49.534303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-05-03T08:43:49.534320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-05-03T08:43:49.534344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-05-03T08:43:49.534373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7500137449933364590:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Syn ... oot. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2025-05-03T08:44:03.002869Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Finish input channelId: 52, from: [28:7500137505334134376:3134] 2025-05-03T08:44:03.002872Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134389:3147], TxId: 281474976715670, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2025-05-03T08:44:03.002874Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Finish input channelId: 53, from: [28:7500137505334134377:3135] 2025-05-03T08:44:03.002876Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134389:3147], TxId: 281474976715670, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-05-03T08:44:03.002921Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134389:3147], TxId: 281474976715670, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-05-03T08:44:03.002933Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 28. Tasks execution finished 2025-05-03T08:44:03.002935Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134352:3110], TxId: 281474976715670, task: 28. Ctx: { CustomerSuppliedId : . TraceId : 01jtan26xe79qskstp5t3amsdn. SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.002944Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 9. Tasks execution finished 2025-05-03T08:44:03.002947Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134333:3091], TxId: 281474976715670, task: 9. Ctx: { TraceId : 01jtan26xe79qskstp5t3amsdn. SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.002953Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 9. pass away 2025-05-03T08:44:03.002955Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 28. pass away 2025-05-03T08:44:03.002958Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=9;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.002970Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=28;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003042Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 1. Tasks execution finished 2025-05-03T08:44:03.003044Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134325:3083], TxId: 281474976715670, task: 1. Ctx: { TraceId : 01jtan26xe79qskstp5t3amsdn. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003051Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 1. pass away 2025-05-03T08:44:03.003056Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003097Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 4. Tasks execution finished 2025-05-03T08:44:03.003099Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134328:3086], TxId: 281474976715670, task: 4. Ctx: { TraceId : 01jtan26xe79qskstp5t3amsdn. SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003105Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 4. pass away 2025-05-03T08:44:03.003111Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003148Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 10. Tasks execution finished 2025-05-03T08:44:03.003150Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134334:3092], TxId: 281474976715670, task: 10. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . TraceId : 01jtan26xe79qskstp5t3amsdn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003157Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 10. pass away 2025-05-03T08:44:03.003162Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=10;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003245Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 2. Tasks execution finished 2025-05-03T08:44:03.003247Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134326:3084], TxId: 281474976715670, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003254Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 2. pass away 2025-05-03T08:44:03.003259Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003342Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 11. Tasks execution finished 2025-05-03T08:44:03.003344Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134335:3093], TxId: 281474976715670, task: 11. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . TraceId : 01jtan26xe79qskstp5t3amsdn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003351Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 11. pass away 2025-05-03T08:44:03.003356Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=11;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003434Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 3. Tasks execution finished 2025-05-03T08:44:03.003436Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134327:3085], TxId: 281474976715670, task: 3. Ctx: { SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003442Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 3. pass away 2025-05-03T08:44:03.003448Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003531Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 44. Tasks execution finished 2025-05-03T08:44:03.003532Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134368:3126], TxId: 281474976715670, task: 44. Ctx: { TraceId : 01jtan26xe79qskstp5t3amsdn. SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003539Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 44. pass away 2025-05-03T08:44:03.003544Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=44;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003631Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 20. Tasks execution finished 2025-05-03T08:44:03.003636Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134344:3102], TxId: 281474976715670, task: 20. Ctx: { TraceId : 01jtan26xe79qskstp5t3amsdn. SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003644Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 20. pass away 2025-05-03T08:44:03.003649Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=20;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003733Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 29. Tasks execution finished 2025-05-03T08:44:03.003734Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137505334134353:3111], TxId: 281474976715670, task: 29. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=. TraceId : 01jtan26xe79qskstp5t3amsdn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:03.003742Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 29. pass away 2025-05-03T08:44:03.003748Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=29;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:03.003869Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137505334134320:3078] TxId: 281474976715670. Ctx: { TraceId: 01jtan26xe79qskstp5t3amsdn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NTYyODU2ODgtMzMzODRlM2QtMTMzMWFlYTgtNmE1OTcwOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137505334134336:3094], task: 12, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 210 Tasks { TaskId: 12 CpuTimeUs: 153 FinishTimeMs: 1746261843001 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 22 BuildCpuTimeUs: 131 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261842923 CurrentWaitOutputTimeUs: 9 UpdateTimeMs: 1746261843001 } MaxMemoryUsage: 1048576 } >> YdbTableBulkUpsert::Overload [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] Test command err: 2025-05-03T08:44:00.211575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137496006420914:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.211606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d2/r3tmp/tmpGH9IZs/pdisk_1.dat 2025-05-03T08:44:00.277137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62042, node 1 2025-05-03T08:44:00.299081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.299112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.299113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.299149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:00.312124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.312159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:3924 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:44:00.313622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.347038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.540838Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2025-05-03T08:44:01.076768Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137500356403415:2211];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.076903Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d2/r3tmp/tmpOlhWA9/pdisk_1.dat 2025-05-03T08:44:01.094746Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15184, node 4 2025-05-03T08:44:01.112746Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.112759Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.112761Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.112815Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:01.176415Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.176449Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.178029Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.186328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:01.909403Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137498365460303:2246];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.909431Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d2/r3tmp/tmpFecRMn/pdisk_1.dat 2025-05-03T08:44:01.924857Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16587, node 7 2025-05-03T08:44:01.945234Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.945248Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.945250Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.945293Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.009466Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.009496Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.011074Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.013149Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.782083Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137502057361150:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.782187Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d2/r3tmp/tmpJRHxNm/pdisk_1.dat 2025-05-03T08:44:02.795412Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7145, node 10 2025-05-03T08:44:02.813084Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.813096Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.813098Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.813143Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.883902Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.883936Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.885430Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.885569Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:03.646390Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137507384513659:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.646407Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000d2/r3tmp/tmpUbZ17u/pdisk_1.dat 2025-05-03T08:44:03.663957Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10499, node 13 2025-05-03T08:44:03.687816Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.687830Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.687831Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.687873Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.747219Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.747285Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.748877Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.750358Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:03.958519Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-05-03T08:44:03.958928Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-05-03T08:44:03.961117Z node 13 :KQP_PROXY DEBUG: TraceId: "01jtan27sn0epprj094ym8336j", Request has 18444997811865.590506s seconds to be completed 2025-05-03T08:44:03.961531Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ== 2025-05-03T08:44:03.961552Z node 13 :KQP_PROXY DEBUG: TraceId: "01jtan27sn0epprj094ym8336j", Created new session, sessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, workerId: [13:7500137507384514571:2327], database: , longSession: 1, local sessions count: 1 2025-05-03T08:44:03.961564Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-05-03T08:44:03.961589Z node 13 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jtan27sn0epprj094ym8336j 2025-05-03T08:44:03.961614Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-05-03T08:44:03.961623Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-05-03T08:44:03.961627Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-05-03T08:44:03.961630Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-05-03T08:44:03.961646Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: unknown state, session actor bootstrapped 2025-05-03T08:44:03.962224Z node 13 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-05-03T08:44:03.962232Z node 13 :KQP_PROXY DEBUG: Updated table service config. 
2025-05-03T08:44:03.962236Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-05-03T08:44:03.971435Z node 13 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, rpc ctrl: [13:7500137507384514597:2330], sameNode: 1, trace_id: 2025-05-03T08:44:03.971454Z node 13 :KQP_PROXY TRACE: Attach local session: [13:7500137507384514571:2327] to rpc: [13:7500137507384514597:2330] on same node 2025-05-03T08:44:03.972998Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: ReadyState, Session closed due to explicit close event 2025-05-03T08:44:03.973019Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-03T08:44:03.973022Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-05-03T08:44:03.973043Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: unknown state, Cleanup temp tables: 0 2025-05-03T08:44:03.973070Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, ActorId: [13:7500137507384514571:2327], ActorState: unknown state, Session actor destroyed 2025-05-03T08:44:03.973082Z node 13 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ==, workerId: [13:7500137507384514571:2327], local sessions count: 0 2025-05-03T08:44:03.974205Z node 13 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [13:7500137507384514600:2332], trace_id: 2025-05-03T08:44:03.974250Z node 13 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=13&id=YzU3ZDljZDUtNjZiMjdmOS1jMzllMDkzZC02NTk3NjEyZQ== 2025-05-03T08:44:03.974273Z node 13 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [13:7500137507384514600:2332], selfId: [13:7500137507384513880:2280], source: [13:7500137507384513880:2280] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-05-03T08:43:59.733659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137488927957601:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.733681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e5/r3tmp/tmp8meezz/pdisk_1.dat 2025-05-03T08:43:59.797004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16102, node 1 2025-05-03T08:43:59.815465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.815478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-05-03T08:43:59.815480Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.815521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:59.833766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.833792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:59.835310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:59.843967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.611469Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137494210369347:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.611507Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e5/r3tmp/tmpNL022m/pdisk_1.dat 2025-05-03T08:44:00.628959Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23292, node 4 2025-05-03T08:44:00.648384Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.648396Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.648398Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.648436Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29098 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.711896Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.711939Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.713400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.713938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.948222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137494210370284:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.948247Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137494210370292:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.948255Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.948899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.952816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137494210370298:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:01.041134Z node 4 :TX_PROXY ERROR: Actor# [4:7500137498505337667:2651] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:01.633977Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137498797987388:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.634068Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e5/r3tmp/tmpPZz27q/pdisk_1.dat 2025-05-03T08:44:01.650218Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62549, node 7 2025-05-03T08:44:01.674172Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.674187Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.674189Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.674235Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.734396Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.734445Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.736016Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.736536Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1746261841.965210 79680 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
2025-05-03T08:44:01.965879Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137498797988317:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.965906Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.965957Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137498797988329:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.966621Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:01.970143Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137498797988331:2335], Databas ... teNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:02.080404Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MTlmMGNjYS1lYzg0MGI1NS00ZjBhYjlhMi1hZWU4NjcwYQ==, ActorId: [7:7500137498797988299:2329], ActorState: ExecuteState, TraceId: 01jtan261daexympgkp0kykfkm, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-05-03T08:44:02.080642Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jtan261daexympgkp0kykfkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTlmMGNjYS1lYzg0MGI1NS00ZjBhYjlhMi1hZWU4NjcwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1746261842.081170 79680 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-05-03T08:44:02.088800Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MTlmMGNjYS1lYzg0MGI1NS00ZjBhYjlhMi1hZWU4NjcwYQ==, ActorId: [7:7500137498797988299:2329], ActorState: ExecuteState, TraceId: 01jtan2651b2cmak6g3jy4tmsq, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-05-03T08:44:02.088955Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan2651b2cmak6g3jy4tmsq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTlmMGNjYS1lYzg0MGI1NS00ZjBhYjlhMi1hZWU4NjcwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1003: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-05-03T08:44:02.676125Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137501945385644:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.676147Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e5/r3tmp/tmpVaqWAJ/pdisk_1.dat 2025-05-03T08:44:02.695949Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22112, node 10 2025-05-03T08:44:02.717878Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.717894Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.717897Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.717956Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27601 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-03T08:44:02.776283Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.776319Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.789277Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.790768Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:02.958515Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137501945386576:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.958534Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.958571Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137501945386588:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:02.959325Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:02.964252Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137501945386590:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:03.030522Z node 10 :TX_PROXY ERROR: Actor# [10:7500137506240353959:2647] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000e5/r3tmp/tmp4pVgwT/pdisk_1.dat 2025-05-03T08:44:03.542154Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:03.554248Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12094, node 13 2025-05-03T08:44:03.574159Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.574175Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.574177Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.574233Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.636962Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.637020Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.638432Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.642161Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:03.654555Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.864444Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137506246421474:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.864469Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137506246421466:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.864531Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.865192Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:03.869020Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137506246421480:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:03.923365Z node 13 :TX_PROXY ERROR: Actor# [13:7500137506246421555:2770] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:03.933561Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan27wq4m0fcae1g3bybxry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDgwMjBhMWMtYWQyZWE4MDgtMzgzNmQ2NjQtMTFmNDJlZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteCachingPolicy [GOOD] Test command err: 2025-05-03T08:43:55.138755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137475183560759:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.138773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fa/r3tmp/tmpAX2fQM/pdisk_1.dat 2025-05-03T08:43:55.202100Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30250, node 1 2025-05-03T08:43:55.221917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.221926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.221928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.221964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10596 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:55.238787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.238807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-03T08:43:55.241300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.247841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10596 2025-05-03T08:43:55.272707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.276678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:55.781825Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500137473188334491:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.781850Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:55.793829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.793859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.797368Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-03T08:43:55.797634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10596 2025-05-03T08:43:55.813166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10596 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1746261836230 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:43:56.217204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10596 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1746261836320 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-05-03T08:43:56.303065Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-03T08:43:56.303556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:43:56.943457Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137478828951992:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.943482Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fa/r3tmp/tmpKSnBTq/pdisk_1.dat 2025-05-03T08:43:56.958235Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15764, node 4 2025-05-03T08:43:56.978653Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.978677Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.978678Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.978714Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.043651Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.043685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.045147Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.047474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27542 2025-05-03T08:43:57.078179Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.088292Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:43:57.589259Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500137484246221223:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.589283Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:57.591081Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.591117Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.592090Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-05-03T08:43:57.592318Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27542 2025-05-03T08:43:57.618211Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0 ... 4:01.889799Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20755 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1746261841990 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:44:01.974583Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20755 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1746261842080 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-05-03T08:44:02.062479Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20755 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1746261842170 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:44:02.145127Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:44:02.146144Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:02.302001Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:02.740554Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137504713323464:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.740575Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000fa/r3tmp/tmpgGCQM6/pdisk_1.dat 2025-05-03T08:44:02.755696Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18260, node 13 2025-05-03T08:44:02.781885Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.781900Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.781902Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.781974Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.839273Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.839317Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.841057Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.844646Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21511 waiting... 2025-05-03T08:44:02.887669Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.892272Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.393739Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7500137509512242475:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.393775Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:03.395791Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.395821Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.397262Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-05-03T08:44:03.397498Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21511 2025-05-03T08:44:03.424887Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21511 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746261843960 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-05-03T08:44:03.946810Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21511 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1746261844030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-05-03T08:44:04.015943Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-05-03T08:44:04.016508Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array >> TYqlDateTimeTests::SimpleOperations [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] >> TGRpcNewClient::InMemoryTables >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty >> YdbImport::Simple >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::SimpleOperations [GOOD] Test command err: 2025-05-03T08:43:59.238469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137490624278044:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.238486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ec/r3tmp/tmpu2dCzn/pdisk_1.dat 2025-05-03T08:43:59.287718Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7504, node 1 2025-05-03T08:43:59.306395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.306406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.306407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.306445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:59.334946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:59.338845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.338867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:59.340235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.541508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:59.605448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137490624279133:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.605453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137490624279122:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.605465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:59.606107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:59.610301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137490624279136:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-03T08:43:59.675760Z node 1 :TX_PROXY ERROR: Actor# [1:7500137490624279211:2767] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:59.723231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jtan23qmawf6jf943wdh27bj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNjI1MzEtN2Y3ZThhNTktYmUyZTQ1MzgtNWJmYTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:59.747458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jtan23vjb083nn99ax9rpmx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNjI1MzEtN2Y3ZThhNTktYmUyZTQ1MzgtNWJmYTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:59.761594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jtan23w57y7f68yv7pfabb4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNjI1MzEtN2Y3ZThhNTktYmUyZTQ1MzgtNWJmYTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:59.776949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jtan23wkfvz51mpv9gk3rv21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNjI1MzEtN2Y3ZThhNTktYmUyZTQ1MzgtNWJmYTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:59.794000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jtan23x52gfgytwx6mm0exca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNjI1MzEtN2Y3ZThhNTktYmUyZTQ1MzgtNWJmYTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:00.493883Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137495212822420:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.493984Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ec/r3tmp/tmpMHb0lG/pdisk_1.dat 2025-05-03T08:44:00.508346Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20951, node 4 2025-05-03T08:44:00.527411Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.527424Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.527426Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.527469Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21934 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.594263Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.594288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:00.595874Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:00.597442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.819076Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:00.878608Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137495212823505:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.878613Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137495212823516:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.878627Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:00.879165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:00.884178Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137495212823519:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:00.982862Z node 4 :TX_PROXY ERROR: Actor# [4:7500137495212823592:2771] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:00.991331Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan24ze4f97pfjbb6hsrk23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDlkNzI4ZTYtZTFmYTAzYWUtOWY0YmNhZjYtZDk3MDQyZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:01.009165Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01 ... tatus: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.152178Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:03.159287Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137507428179663:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:03.234277Z node 10 :TX_PROXY ERROR: Actor# [10:7500137507428179732:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:03.242053Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan276f5zdqq4d3sjr9x27d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjgyMzExNGEtOTdjOTZhYmUtNDQyY2U3YmUtMzgwZjhkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:03.258486Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan279dartjp77sh0x56cvt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjgyMzExNGEtOTdjOTZhYmUtNDQyY2U3YmUtMzgwZjhkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:03.274917Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan279w340xxmqqwr2ktcvy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjgyMzExNGEtOTdjOTZhYmUtNDQyY2U3YmUtMzgwZjhkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:03.290459Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan27acfjtefqehhhxg9pn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZjgyMzExNGEtOTdjOTZhYmUtNDQyY2U3YmUtMzgwZjhkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:03.867379Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137509807015017:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.867408Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ec/r3tmp/tmp5wz0nE/pdisk_1.dat 2025-05-03T08:44:03.889201Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22513, node 13 2025-05-03T08:44:03.908783Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.908801Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.908803Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.908849Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4739 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.967891Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.967926Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.969101Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.969135Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.218425Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.278115Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.289558Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137514101983512:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.289560Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137514101983501:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.289573Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.290166Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-03T08:44:04.300773Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137514101983515:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-03T08:44:04.370279Z node 13 :TX_PROXY ERROR: Actor# [13:7500137514101983590:2856] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:04.379186Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan28a1f19j9rp1tjfcf9e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.390946Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan28cydyqnekzvcyn72r7z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.433840Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jtan28da74gte125gqsm2c04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.435557Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jtan28da74gte125gqsm2c04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.472191Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jtan28ep6ha2swsnf1q6y9az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.473395Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jtan28ep6ha2swsnf1q6y9az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.487034Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jtan28fw7cxvdd6atefv3nb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.497800Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jtan28g841x83mssecg170gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.508642Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jtan28gk7jvzx45bc9w8dr23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-05-03T08:44:04.519444Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jtan28gybrq8er336fracxtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.529854Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jtan28h85xhgfrr2njezbctc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.556128Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jtan28hkc69xqnv694sg9p8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.556881Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jtan28hkc69xqnv694sg9p8k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTI5NDVmNDMtY2IxYzMzOGItZjVhODE3OWMtMzUyNmE5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::TestYqlWrongTable >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array [GOOD] >> YdbTableBulkUpsert::ValidRetry >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty >> YdbYqlClient::TestReadTableMultiShard >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> YdbScripting::MultiResults [GOOD] >> YdbScripting::Params >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> TGRpcYdbTest::MakeListRemoveDirectory >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array >> TGRpcNewClient::InMemoryTables [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbS3Internal::BadRequests >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array [GOOD] >> 
YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> YdbImport::Simple [GOOD] >> YdbIndexTable::AlterIndexImplBySuperUser >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogPagingBetween ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD] Test command err: 2025-05-03T08:44:01.207804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137498437697765:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cf/r3tmp/tmpN5puYy/pdisk_1.dat 2025-05-03T08:44:01.249360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:01.269302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12626, node 1 2025-05-03T08:44:01.305229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.305239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.305250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.305287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:01.307174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.307198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.308761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.334886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.219520Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137503173243010:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.219803Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cf/r3tmp/tmpodtPZS/pdisk_1.dat 2025-05-03T08:44:02.234319Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7948, node 4 2025-05-03T08:44:02.252620Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.252631Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.252633Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.252674Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.319971Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.320005Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.321621Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.323009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:03.066339Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137507539022548:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.066360Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cf/r3tmp/tmpHZMLbY/pdisk_1.dat 2025-05-03T08:44:03.082947Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1756, node 7 2025-05-03T08:44:03.105226Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.105241Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.105243Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.105289Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.167327Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.167359Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.168815Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.169999Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:03.411450Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.475094Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137507539023630:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.475135Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137507539023635:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.475144Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.475851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:03.481429Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137507539023644:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:03.545761Z node 7 :TX_PROXY ERROR: Actor# [7:7500137507539023723:2770] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:04.128454Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137511029322000:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.128487Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cf/r3tmp/tmpcew7sq/pdisk_1.dat 2025-05-03T08:44:04.149746Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17231, node 10 2025-05-03T08:44:04.163056Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.163073Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.163076Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.163110Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.177185Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:04.229265Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.229300Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.230757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.463794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.523388Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.532751Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137511029323201:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.532752Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137511029323211:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.532770Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.533312Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-05-03T08:44:04.537789Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137511029323215:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-05-03T08:44:04.631803Z node 10 :TX_PROXY ERROR: Actor# [10:7500137511029323290:2862] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:04.640075Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jtan28hm6zykbhgdf85ezdfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZmM0MzZmZWYtMTg4ZmM4OTYtZjE2NDViZjUtNmU4YmE1MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:04.660085Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-05-03T08:44:05.191214Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137516885987879:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.191255Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cf/r3tmp/tmpbnqpyP/pdisk_1.dat 2025-05-03T08:44:05.206693Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27596, node 13 2025-05-03T08:44:05.222456Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.222470Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.222472Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.222502Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:05.291587Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.291638Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.293261Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.294800Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.518487Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.579469Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.591320Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Types >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::GetOperationBadRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-05-03T08:43:43.956419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137420542011521:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.956651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000146/r3tmp/tmpwfR1Lg/pdisk_1.dat 2025-05-03T08:43:44.023184Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31280, node 1 2025-05-03T08:43:44.040281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:44.040294Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:44.040300Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:44.040340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:44.056555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:44.056582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:44.058008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TClient is connected to server localhost:65480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:44.076776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:44.107338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:55126" , at schemeshard: 72057594046644480 2025-05-03T08:43:44.107426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.107605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-03T08:43:44.107625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-03T08:43:44.107634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-03T08:43:44.107648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-03T08:43:44.107654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-03T08:43:44.107666Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-05-03T08:43:44.107728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-05-03T08:43:44.108220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-05-03T08:43:44.108397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:43:44.108419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.108442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:43:44.108458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-05-03T08:43:44.109283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-03T08:43:44.109317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-05-03T08:43:44.109382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-03T08:43:44.109390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-03T08:43:44.109428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-03T08:43:44.109448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-03T08:43:44.109456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137424836979401:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-05-03T08:43:44.109460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137424836979401:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-05-03T08:43:44.109466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:44.109472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-05-03T08:43:44.109639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: 
"hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-03T08:43:44.109723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } Binde ... 
ESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-05-03T08:44:02.299116Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2025-05-03T08:44:02.299139Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-05-03T08:44:02.299462Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-03T08:44:02.299469Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-03T08:44:02.299469Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-03T08:44:02.299470Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-03T08:44:02.300862Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-03T08:44:02.301049Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7500137502046829597:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-03T08:44:02.301060Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-05-03T08:44:02.301075Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-05-03T08:44:02.301089Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-05-03T08:44:02.301092Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-05-03T08:44:02.301114Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-03T08:44:02.301264Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-03T08:44:02.301287Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-05-03T08:44:02.301296Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:02.301442Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-03T08:44:02.301468Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-03T08:44:02.301597Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-03T08:44:02.301640Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 
2025-05-03T08:44:02.301647Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-03T08:44:02.301657Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:44:02.302217Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7500137502046829595:2324];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-03T08:44:02.303297Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7500137502046829603:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-03T08:44:02.303811Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-03T08:44:02.303826Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-03T08:44:02.303839Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-03T08:44:02.303840Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-03T08:44:02.304289Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7500137502046829611:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1152;event=tablet_die; 2025-05-03T08:44:02.304565Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-03T08:44:02.304580Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-03T08:44:02.304665Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-03T08:44:02.304679Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-03T08:44:02.304690Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:02.913116Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7500137505232971971:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.913132Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000146/r3tmp/tmpPtCtPo/pdisk_1.dat 2025-05-03T08:44:02.932865Z node 67 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29636, node 67 2025-05-03T08:44:02.957195Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.957207Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.957209Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.957259Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.014974Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.015020Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.015904Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.016323Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:44:03.031425Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:41348" , at schemeshard: 72057594046644480 2025-05-03T08:44:03.031491Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:03.031496Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-05-03T08:44:03.032154Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-03T08:44:03.032198Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 
2025-05-03T08:44:03.032252Z node 67 :TX_PROXY ERROR: Actor# [67:7500137509527940165:2581] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1634487C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x164B5259) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+2476 (0x161C2E2C) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x161D3E17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x164B710E) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x161D37DD) NUnitTest::TTestFactory::Execute()+803 (0x164B7883) NUnitTest::RunMain(int, char**)+3021 (0x164C942D) ??+0 (0x7FDA3203ED90) __libc_start_main+128 (0x7FDA3203EE40) _start+41 (0x14EAE029) >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> YdbYqlClient::TraceId [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> YdbS3Internal::BadRequests [GOOD] >> YdbScripting::BasicV0 >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 >> YdbTableBulkUpsert::RetryOperation [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> YdbYqlClient::CopyTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-05-03T08:44:00.245225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137493499886511:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:00.245242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmpJStV7S/pdisk_1.dat 2025-05-03T08:44:00.309464Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22643, node 1 2025-05-03T08:44:00.324699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:00.324721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:00.324722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:00.324745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:44:00.345789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:00.345820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-03T08:44:00.347279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:00.357865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:00.374202Z node 1 :TICKET_PARSER DEBUG: Ticket EBB322211C6716FAADD889F266BDA824F4BC776ED545B089655E4A88512D3889 (ipv6:[::1]:46372) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-05-03T08:44:00.374275Z node 1 :TICKET_PARSER ERROR: Ticket EBB322211C6716FAADD889F266BDA824F4BC776ED545B089655E4A88512D3889: Cannot create token from certificate. 
Client certificate failed verification 2025-05-03T08:44:00.381951Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:46382) has now valid token of root@builtin 2025-05-03T08:44:00.391538Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:00.391553Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:00.391555Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:00.391566Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:01.035257Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137500849186238:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.035337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmp1eaFYT/pdisk_1.dat 2025-05-03T08:44:01.052118Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6145, node 4 2025-05-03T08:44:01.070668Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.070680Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.070681Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.070728Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:01.135867Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.135905Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.137395Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.138487Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:01.160758Z node 4 :TICKET_PARSER DEBUG: Ticket EBB322211C6716FAADD889F266BDA824F4BC776ED545B089655E4A88512D3889 (ipv6:[::1]:58580) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-05-03T08:44:01.160831Z node 4 :TICKET_PARSER ERROR: Ticket EBB322211C6716FAADD889F266BDA824F4BC776ED545B089655E4A88512D3889: Cannot create token from certificate. Client certificate failed verification 2025-05-03T08:44:01.175501Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:58592) has now valid token of root@builtin 2025-05-03T08:44:01.190771Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:01.190791Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:01.190795Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:01.190806Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:01.882016Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137501133247312:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:01.882050Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmpdE9E4Q/pdisk_1.dat 2025-05-03T08:44:01.896661Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29717, node 7 2025-05-03T08:44:01.914929Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:01.914951Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:01.914952Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:01.914989Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:01.982626Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:01.982665Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:01.984054Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:01.985171Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0503 08:44:01.994505793 83634 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:01.995347139 83696 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:01.997363277 83633 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:01.998174847 83695 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:02.000019250 83702 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:02.000704437 83544 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0503 08:44:02.002245929 83701 ssl_transport_security.cc:1431] Handshake faile ... grpc.internal.event_engine=0x77f3fc747f0, grpc.internal.subchannel_pool=0x77f3a7b7ec0, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x77f3b7e2650, grpc.server_uri=dns:///localhost:25603, grpc.socket_mutator=0x77f3a3b27e0} E0503 08:44:04.046251537 87234 ssl_transport_security.cc:783] Invalid cert chain file. E0503 08:44:04.046256667 87234 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0503 08:44:04.046265818 87234 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:25603'; Got args: {grpc.client_channel_factory=0x77f3fd191e8, grpc.default_authority=localhost:25603, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x77f1c25c200, grpc.internal.event_engine=0x77f3fc74640, grpc.internal.subchannel_pool=0x77f3a7b7ec0, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x77f3b7e2650, grpc.server_uri=dns:///localhost:25603, grpc.socket_mutator=0x77f3a3b27e0} 2025-05-03T08:44:04.761282Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7500137511252088019:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.761303Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmpiv1x5N/pdisk_1.dat 2025-05-03T08:44:04.777886Z node 19 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2864, node 19 2025-05-03T08:44:04.795030Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.795043Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.795045Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.795083Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.866223Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.866268Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.867158Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.867609Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:44:04.889940Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:37760) has now valid token of root@builtin 2025-05-03T08:44:04.898862Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:04.898880Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:04.898884Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:04.898895Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmp4l9wDP/pdisk_1.dat 2025-05-03T08:44:05.447271Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:05.457640Z node 22 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28614, node 22 2025-05-03T08:44:05.485017Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.485064Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.485066Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.485109Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.542077Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.542110Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.543226Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.543314Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T08:44:05.569003Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:45180) has now valid token of root@builtin 2025-05-03T08:44:05.587016Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:05.587034Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:05.587037Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:05.587049Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:06.541307Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7500137519399765794:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.541327Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000dd/r3tmp/tmpq2RG7m/pdisk_1.dat 2025-05-03T08:44:06.553105Z node 25 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18702, node 25 2025-05-03T08:44:06.572761Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.572773Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.572775Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.572820Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.641838Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.641880Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.643503Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.644943Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:06.669460Z node 25 :TICKET_PARSER DEBUG: Ticket 8BB767570680E41B6BBC400033DE38269E858F4174E363C108A23516D0201800 (ipv6:[::1]:56446) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:44:06.691438Z node 25 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-05-03T08:44:06.701975Z node 25 :TICKET_PARSER DEBUG: Ticket 71EA0D6E248197C5F1A4449BF686E8DD07AE055AE7AED4C106F3300C209BB795 (ipv6:[::1]:56466) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-05-03T08:44:06.702054Z node 25 :TICKET_PARSER ERROR: Ticket 71EA0D6E248197C5F1A4449BF686E8DD07AE055AE7AED4C106F3300C209BB795: Cannot create token from certificate. Client certificate failed verification >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::OperationTimeout >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv >> YdbIndexTable::CreateTableAddIndex [GOOD] >> YdbIndexTable::AlterTableAddIndex >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-05-03T08:43:56.947108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137479869353512:2216];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.947591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ee/r3tmp/tmp1AINef/pdisk_1.dat 2025-05-03T08:43:57.014374Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65077, node 1 2025-05-03T08:43:57.040829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.040842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.040844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.040899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:43:57.048375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-05-03T08:43:57.048393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.050113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.086904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.255795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.003416s 2025-05-03T08:43:57.336563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137484164323510:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.336563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137484164323499:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.336579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.337250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:57.341240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137484164323513:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:57.421228Z node 1 :TX_PROXY ERROR: Actor# [1:7500137484164323582:4052] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:57.490253Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan21grbknt8thk00jz8572, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTg4YWU3OGMtZWQwMWMyZGItMmFmZjEyY2MtYzBmZmVkODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-05-03T08:43:58.209890Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137485720470944:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.209943Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ee/r3tmp/tmpblosUn/pdisk_1.dat 2025-05-03T08:43:58.221835Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18353, node 4 2025-05-03T08:43:58.242729Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:58.242743Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:58.242745Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:58.242787Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:58.310481Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:58.310507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:58.311976Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:58.313639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:58.492590Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-05-03T08:43:59.041200Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137490753817179:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.041224Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ee/r3tmp/tmpefKIE1/pdisk_1.dat 2025-05-03T08:43:59.056782Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18022, node 7 2025-05-03T08:43:59.073670Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.073685Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.073686Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.073728Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:59.142065Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.142095Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:59.143727Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.144749Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2759 2025-05-03T08:43:59.165672Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:59.235508Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:7500137490753818486:2898], serverId# [7:7500137490753818489:2901], ses ... 
node 7 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 .2025-05-03T08:44:04.235057Z node 7 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-03T08:44:04.235067Z node 7 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-05-03T08:44:04.235120Z node 7 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-03T08:44:04.235132Z node 7 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-05-03T08:44:04.235575Z node 7 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-03T08:44:04.235602Z node 7 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-05-03T08:44:04.237155Z node 7 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-03T08:44:04.237170Z node 7 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-05-03T08:44:04.856229Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137511140836352:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ee/r3tmp/tmpvXB64B/pdisk_1.dat 2025-05-03T08:44:04.861110Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:04.870373Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30357, node 10 2025-05-03T08:44:04.891563Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.891576Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.891579Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.891626Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:04.954729Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.954755Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.956415Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.958547Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.157712Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ee/r3tmp/tmph91sWw/pdisk_1.dat 2025-05-03T08:44:06.111110Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:06.122376Z node 13 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 18764, node 13 2025-05-03T08:44:06.143508Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.143520Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.143523Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.143569Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.206555Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.206591Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.208034Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.210206Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:06.441617Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-05-03T08:44:02.907371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137504899875734:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.907538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ca/r3tmp/tmpTduhFE/pdisk_1.dat 
2025-05-03T08:44:02.978415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25706, node 1 2025-05-03T08:44:03.004257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.004271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.004272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.004318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:03.009471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.009517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.010993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.044982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:03.756443Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137509063193441:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.756476Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ca/r3tmp/tmp40o3Qp/pdisk_1.dat 2025-05-03T08:44:03.771903Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16314, node 4 2025-05-03T08:44:03.792090Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.792105Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.792108Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.792151Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11165 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.856780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.856812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.858333Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.861086Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.071390Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137513358161689:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.071412Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137513358161679:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.071430Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.072046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:04.075573Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137513358161693:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:04.175684Z node 4 :TX_PROXY ERROR: Actor# [4:7500137513358161766:2654] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:04.807248Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137513537429193:2248];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ca/r3tmp/tmpmcLVKj/pdisk_1.dat 2025-05-03T08:44:04.812272Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:04.823656Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29212, node 7 2025-05-03T08:44:04.842526Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.842540Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.842542Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.842584Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.905925Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.905963Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.907554Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.910919Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.094220Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137517832397249:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
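The recurring sequence above — KQP_WORKLOAD_SERVICE warning "Failed to fetch pool info, NOT_FOUND" for the default pool, then an ESchemeOpCreateResourcePool suboperation, then a TX_PROXY error whose message says "path exist, request accepts it" — is a create-on-first-use race: several actors notice the pool is missing, all attempt to create it, and the losers treat "already exists" as success. The sketch below illustrates that idempotent ensure-exists pattern in general terms only; it is not the actual workload-service code, and every name in it is invented for the example.

    // Illustrative ensure-exists pattern: concurrent creators tolerate ALREADY_EXISTS,
    // so only genuine failures propagate. Not the actual YDB workload-service code.
    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>
    #include <thread>
    #include <vector>

    enum class EStatus { SUCCESS, ALREADY_EXISTS };

    class TCatalog {
    public:
        EStatus Create(const std::string& path) {
            std::lock_guard<std::mutex> guard(Lock);
            return Paths.insert(path).second ? EStatus::SUCCESS : EStatus::ALREADY_EXISTS;
        }
    private:
        std::mutex Lock;
        std::set<std::string> Paths;
    };

    // Every caller ends up with a usable pool: ALREADY_EXISTS is not treated as an error.
    bool EnsurePoolExists(TCatalog& catalog, const std::string& path) {
        const EStatus status = catalog.Create(path);
        return status == EStatus::SUCCESS || status == EStatus::ALREADY_EXISTS;
    }

    int main() {
        TCatalog catalog;
        std::vector<std::thread> workers;
        for (int i = 0; i < 3; ++i) {  // three concurrent "pool fetcher" actors
            workers.emplace_back([&catalog] {
                const bool ok = EnsurePoolExists(
                    catalog, "/Root/.metadata/workload_manager/pools/default");
                std::cout << (ok ? "pool ready\n" : "pool creation failed\n");
            });
        }
        for (auto& worker : workers) {
            worker.join();
        }
        return 0;
    }

Under this reading, the WARN/ERROR lines around default-pool creation are expected noise during test startup rather than test failures, which is consistent with the tests above still finishing with [GOOD].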
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.094243Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.096296Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.161449Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137517832397410:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.161479Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.161483Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137517832397415:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.162170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:05.168343Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137517832397417:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:05.234306Z node 7 :TX_PROXY ERROR: Actor# [7:7500137517832397492:2774] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:05.258649Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan29831tvp1f13zprxkr47, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2I1ZDI4ODEtNzUzNDM5NS1lYjQxMmYyYS05YTM5YWVkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.280345Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan298z9g9dyvyfr0xrzm2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2I1ZDI4ODEtNzUzNDM5NS1lYjQxMmYyYS05YTM5YWVkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.861476Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137517182106202:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.861638Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ca/r3tmp/tmpW4uNYW/pdisk_1.dat 2025-05-03T08:44:05.877449Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6561, node 10 2025-05-03T08:44:05.905217Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.905235Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.905237Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.905286Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:44:05.961620Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.961668Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.985299Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.990557Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.162761Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137521477074440:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.162781Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137521477074429:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.162839Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.163426Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:06.166923Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137521477074443:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:06.232568Z node 10 :TX_PROXY ERROR: Actor# [10:7500137521477074518:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:06.913942Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137519165286846:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.913963Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ca/r3tmp/tmpx3xQ37/pdisk_1.dat 2025-05-03T08:44:06.928957Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23561, node 13 2025-05-03T08:44:06.957086Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.957100Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.957102Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.957153Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.014315Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.014345Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.015819Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.017396Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.218648Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137523460255092:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.218670Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137523460255081:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.218688Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.219293Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:07.222998Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137523460255095:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:07.296968Z node 13 :TX_PROXY ERROR: Actor# [13:7500137523460255164:2646] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] Test command err: 2025-05-03T08:44:03.444564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137510196566827:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.444588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c6/r3tmp/tmpkuMcqM/pdisk_1.dat 2025-05-03T08:44:03.510765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17950, node 1 2025-05-03T08:44:03.530023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.530035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.530048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.530082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:44:03.544958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.544980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.546484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.586634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.283774Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137511953989955:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.283795Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c6/r3tmp/tmpZmzTFs/pdisk_1.dat 2025-05-03T08:44:04.297599Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27015, node 4 2025-05-03T08:44:04.311144Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.311159Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.311160Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.311202Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26493 WaitRootIsUp 'Root'... 
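The repeated HIVE warnings in these blocks ("VolatileState: Unknown -> Disconnected", "Disconnected -> Connecting", "Connecting -> Connected") trace a node registration state machine as Hive learns about each freshly started test node. Purely as an illustration of that progression, and not the actual Hive implementation, a minimal transition sketch with invented names might look like this:

    // Illustrative node-state progression matching the HIVE log lines above.
    // Not the actual Hive code; the enum and function names are invented.
    #include <iostream>

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    const char* ToString(EVolatileState state) {
        switch (state) {
            case EVolatileState::Unknown:      return "Unknown";
            case EVolatileState::Disconnected: return "Disconnected";
            case EVolatileState::Connecting:   return "Connecting";
            case EVolatileState::Connected:    return "Connected";
        }
        return "?";
    }

    // Advance one step along the happy path seen in the log.
    EVolatileState Next(EVolatileState state) {
        switch (state) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        return state;
    }

    int main() {
        EVolatileState state = EVolatileState::Unknown;
        while (state != EVolatileState::Connected) {
            const EVolatileState next = Next(state);
            std::cout << "VolatileState: " << ToString(state) << " -> " << ToString(next) << "\n";
            state = next;
        }
        return 0;
    }

Running this prints the same three transitions the HIVE warnings record for each node once its gRPC endpoint comes up.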
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.384300Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.384330Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.385790Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.387306Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.598010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.675782Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-05-03T08:44:04.690848Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-05-03T08:44:05.346034Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137516930853404:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.348553Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c6/r3tmp/tmpCI9WUw/pdisk_1.dat 2025-05-03T08:44:05.360813Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2216, node 7 2025-05-03T08:44:05.373975Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.373997Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.373999Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.374053Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61402 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.445634Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.445670Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.447080Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.449859Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.681188Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.397542Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137519207122199:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.397565Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c6/r3tmp/tmpaBjUWV/pdisk_1.dat 2025-05-03T08:44:06.411507Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23555, node 10 2025-05-03T08:44:06.426096Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.426108Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.426109Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.426144Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20711 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.497732Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.497769Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.499193Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.499359Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.735184Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.453753Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137526015664287:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.453772Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c6/r3tmp/tmpCw0pW8/pdisk_1.dat 2025-05-03T08:44:07.474090Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27579, node 13 2025-05-03T08:44:07.488761Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.488773Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.488775Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.488809Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.554498Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.554527Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.556085Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.559378Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.803031Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv >> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> YdbIndexTable::AlterTableAddIndex [GOOD] >> YdbLogStore::AlterLogStore >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array >> 
test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] Test command err: 2025-05-03T08:44:05.557378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137515564030898:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.557418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b3/r3tmp/tmpDFGnaD/pdisk_1.dat 2025-05-03T08:44:05.611655Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5117, node 1 2025-05-03T08:44:05.626730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.626742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.626743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.626781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.654035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.657932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.657958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.659515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.861581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137515564031843:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.861608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.900698Z node 1 :TX_PROXY ERROR: Actor# [1:7500137515564031864:2602] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-05-03T08:44:05.905863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137515564031874:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.905894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.907753Z node 1 :TX_PROXY ERROR: Actor# [1:7500137515564031881:2612] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b3/r3tmp/tmpPDEorm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20319, node 4 TClient is connected to server localhost:61070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b3/r3tmp/tmplqhBWZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16478, node 7 TClient is connected to server localhost:3964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
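The two TX_PROXY errors in this block ("Column Key has wrong key type Json" and "... wrong key type Yson") show the schema rule the test exercises: Json and Yson columns can hold data but are rejected as primary-key columns. As a rough illustration of that rule only — the real check lives in YDB's schema code and is more involved — a validation helper could look like the sketch below; the function names and the rejected-type list are assumptions limited to what the log shows.

    // Rough illustration of the key-column type check implied by the errors above.
    // Not YDB's actual validation code; the rejected-type list is an assumption.
    #include <iostream>
    #include <set>
    #include <string>

    bool IsAllowedKeyColumnType(const std::string& type) {
        static const std::set<std::string> rejected = {"Json", "Yson"};
        return rejected.count(type) == 0;
    }

    std::string ValidateKeyColumn(const std::string& name, const std::string& type) {
        if (!IsAllowedKeyColumnType(type)) {
            return "Column " + name + " has wrong key type " + type;  // message shape from the log
        }
        return "OK";
    }

    int main() {
        std::cout << ValidateKeyColumn("Key", "Json") << "\n";    // rejected, as in the test
        std::cout << ValidateKeyColumn("Key", "Yson") << "\n";    // rejected, as in the test
        std::cout << ValidateKeyColumn("Key", "Uint64") << "\n";  // accepted
        return 0;
    }

The first two calls reproduce the exact error message shape recorded above, while an ordinary primitive key type passes, which is why the rest of the YdbYqlClient::TestYqlTypesFromPreparedQuery run proceeds normally.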
2025-05-03T08:44:08.129572Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137528282076249:2144];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.129792Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b3/r3tmp/tmp2SqsZq/pdisk_1.dat 2025-05-03T08:44:08.147784Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21599, node 10 2025-05-03T08:44:08.168293Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.168304Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.168305Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.168327Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:08.229233Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.229261Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.230781Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.233470Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:08.405817Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137528282077105:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.405836Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137528282077116:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.405842Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.406483Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:08.409763Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137528282077119:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:08.486230Z node 10 :TX_PROXY ERROR: Actor# [10:7500137528282077194:2649] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> YdbScripting::BasicV1 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::Timeout >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv >> YdbLogStore::AlterLogStore [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbScripting::BasicV1 [GOOD] Test command err: 2025-05-03T08:44:04.052755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137511614128827:2076];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.052869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c2/r3tmp/tmpBR0QiX/pdisk_1.dat 2025-05-03T08:44:04.115162Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24447, node 1 2025-05-03T08:44:04.136780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.136791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.136792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.136853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:04.188748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.188778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.190236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.190676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.339199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.517381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137511614131704:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.517389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137511614131714:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.517463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.517987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:04.520991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137511614131720:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:04.619428Z node 1 :TX_PROXY ERROR: Actor# [1:7500137511614131795:4110] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:04.671096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan28h4595mpakxh03n75va, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNjZmY4ZWYtY2Q5NTFkMi00YTZiODlmZS1mOWEzZDQ4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS 2025-05-03T08:44:05.295456Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137515528145526:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.295829Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c2/r3tmp/tmp4haAfu/pdisk_1.dat 2025-05-03T08:44:05.312595Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31504, node 4 2025-05-03T08:44:05.330729Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.330744Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.330746Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.330789Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:05.396106Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.396136Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.397805Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.399352Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.598454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.640271Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137515528148349:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.640280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137515528148338:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.640294Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.640909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:05.643989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137515528148352:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:05.706449Z node 4 :TX_PROXY ERROR: Actor# [4:7500137515528148425:4088] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:05.718661Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan29m7b7htqhrwkmm2dm1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2M5YmJmYzMtMjcyYjFlZjctNWNlNmE4OTMtZWIzZjFkNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.725490Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4091 2025-05-03T08:44:06.346974Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137520266559242:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.347080Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c2/r3tmp/tmp05G1ZY/pdisk_1.dat 2025-05-03T08:44:06.363522Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8611, node 7 2025-05-03T08:44:06.384771Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.384783Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.384785Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.384818Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14170 WaitRootIsUp 'Ro ... maybe) 2025-05-03T08:44:07.449739Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.449739Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.449772Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.515740Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.515767Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.517303Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.519036Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.764999Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137523556350948:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.765023Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.775166Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.834523Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137523556351112:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.834543Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.834563Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137523556351117:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:07.835124Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:07.842379Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137523556351119:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:07.931001Z node 10 :TX_PROXY ERROR: Actor# [10:7500137523556351192:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:07.937123Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan2bfc7w39bd35p0a1qpmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzNkYWFmODQtYWFkOTRlY2ItNzQzY2RjMTMtMTA1MTBhMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:07.952938Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan2bw58cnpzsqpt30pttad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDgzNzExMWMtOWRjYjY0MTItMzI5YTVkYmEtNTM5NGRhZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:07.954329Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261847995, txId: 281474976715662] shutting down 2025-05-03T08:44:08.473432Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137531587785892:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.473454Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c2/r3tmp/tmpqPEi9u/pdisk_1.dat 2025-05-03T08:44:08.491569Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10884, node 13 2025-05-03T08:44:08.511204Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.511215Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.511217Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.511264Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:08.574072Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.574116Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.575604Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.577224Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:08.758545Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137531587786812:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.758569Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.766552Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.824923Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137531587786991:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.824945Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7500137531587786996:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.824952Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:08.825540Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:08.828768Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7500137531587786998:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-05-03T08:44:08.896757Z node 13 :TX_PROXY ERROR: Actor# [13:7500137531587787071:2763] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
2025-05-03T08:44:08.903328Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan2cgfejzeabgyca1n7acn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDM2YmZlZGUtNzc3ZTRiMTAtZjZkYzllMzEtOTQzOWM4ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-05-03T08:44:08.918146Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jtan2ctaed665x9tkvwe617f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzdiZmRhMmItMmY0ZWZiZmMtMTkzMzI4ZTktMzgyMTRlZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-05-03T08:44:08.919438Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261848961, txId: 281474976715662] shutting down
>> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogStore [GOOD]
Test command err:
2025-05-03T08:44:05.334583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137516464173491:2075];send_to=[0:7307199536658146131:7762515];
2025-05-03T08:44:05.334617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b7/r3tmp/tmpKkI6MP/pdisk_1.dat
2025-05-03T08:44:05.409150Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9410, node 1
2025-05-03T08:44:05.423548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-03T08:44:05.423561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-05-03T08:44:05.423562Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-05-03T08:44:05.423599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-05-03T08:44:05.435135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-03T08:44:05.435172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-03T08:44:05.436811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:32134
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.482099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.654162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.002633s 2025-05-03T08:44:05.720144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137516464174599:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.720158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137516464174588:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.720172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.720801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:05.724462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137516464174602:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:05.800722Z node 1 :TX_PROXY ERROR: Actor# [1:7500137516464174677:2775] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:05.876829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan29pqa4mz0nchvzpb428g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI3MjFmMTMtMTIxNzJlZDAtMmFmYTdhZmQtNjEwYzdhNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-05-03T08:44:06.373218Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137521379007600:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.373237Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b7/r3tmp/tmpOx6sa1/pdisk_1.dat 2025-05-03T08:44:06.385480Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20895, node 4 2025-05-03T08:44:06.410898Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.410913Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.410915Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.410965Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:06.473462Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.473490Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.474870Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.476446Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:13626 2025-05-03T08:44:06.667837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.731373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.731405Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:521, at schemeshard: 72057594046644480 2025-05-03T08:44:06.731870Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:521, operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable 2025-05-03T08:44:06.731929Z node 4 :TX_PROXY ERROR: Actor# [4:7500137521379008928:2891] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:521" severity: 1 } Error 128: Administrative access denied TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746261846770 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-05-03T08:44:06.735109Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976715660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-05-03T08:44:06.735156Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:06.735162Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-05-0 ... ding or dropping columns in index table is not supported 2025-05-03T08:44:06.769430Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976715663:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-05-03T08:44:06.769470Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:06.769475Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976715663:1, at schemeshard: 72057594046644480 2025-05-03T08:44:06.769492Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:2, propose status:StatusInvalidParameter, reason: Adding or dropping columns in index table is not supported, at schemeshard: 72057594046644480 2025-05-03T08:44:06.770131Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 281474976715663:1, at schemeshard: 72057594046644480, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Adding or dropping columns in index table is not supported, tx message: Transaction { WorkingDir: "/Root/Foo/TimestampIndex" OperationType: ESchemeOpAlterTable AlterTable { Name: "indexImplTable" DropColumns { Name: "Timestamp" } } } TxId: 281474976715663 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" 2025-05-03T08:44:06.770166Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex AbortPropose, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.770705Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Adding or dropping columns in index table is not supported, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable 2025-05-03T08:44:06.770751Z node 4 :TX_PROXY ERROR: Actor# [4:7500137521379009087:3032] txid# 281474976715663, issues: { message: "Adding or dropping columns in index table is not supported" severity: 1 } Error 128: Adding or dropping columns in index table is not supported 2025-05-03T08:44:07.417019Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137523398883623:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.417081Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b7/r3tmp/tmplaGF62/pdisk_1.dat 2025-05-03T08:44:07.428991Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4075, node 7 2025-05-03T08:44:07.448914Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.448925Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.448926Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.448955Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.517456Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.517483Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.518878Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.520500Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:07.534671Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.071327Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137531624104962:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.071345Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b7/r3tmp/tmpKKvRDe/pdisk_1.dat 2025-05-03T08:44:08.091573Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62638, node 10 2025-05-03T08:44:08.105848Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.105863Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.105864Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.105898Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:08.171809Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.171837Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.173185Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.174873Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:08.184936Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.248656Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-03T08:44:08.267078Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.942658Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137528485524632:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.942693Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000b7/r3tmp/tmpRZQ6LR/pdisk_1.dat 2025-05-03T08:44:08.960302Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8814, node 13 2025-05-03T08:44:08.986563Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.986577Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.986579Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.986616Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:09.042691Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:09.042719Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:09.044092Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:09.045762Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyType
>> YdbYqlClient::TestReadTableBatchLimits [GOOD]
>> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD]
>> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile
>> TGRpcYdbTest::KeepAlive [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD]
Test command err:
2025-05-03T08:44:05.754374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137518382121717:2074];send_to=[0:7307199536658146131:7762515];
2025-05-03T08:44:05.754396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a8/r3tmp/tmpMzY6O1/pdisk_1.dat
2025-05-03T08:44:05.807772Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19562, node 1
2025-05-03T08:44:05.820071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-05-03T08:44:05.820083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-05-03T08:44:05.820086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-05-03T08:44:05.820122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31858
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-05-03T08:44:05.854748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-05-03T08:44:05.854783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-05-03T08:44:05.856414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-05-03T08:44:05.884225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-05-03T08:44:05.895204Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jtan29w6bkxk6z2301bm6gy2, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54534, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998833s 2025-05-03T08:44:05.898354Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jtan29w86trdambq47v9vmh3, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54534, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:44:06.057435Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jtan2a19ar73e6qa7bzx8pnh, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54534, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:44:06.057539Z node 1 :TX_PROXY DEBUG: actor# [1:7500137518382121945:2140] Handle TEvProposeTransaction 2025-05-03T08:44:06.057549Z node 1 :TX_PROXY DEBUG: actor# [1:7500137518382121945:2140] TxId# 281474976715658 ProcessProposeTransaction 2025-05-03T08:44:06.057564Z node 1 :TX_PROXY DEBUG: actor# [1:7500137518382121945:2140] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7500137522677089963:2602] 2025-05-03T08:44:06.065191Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:54534" 2025-05-03T08:44:06.065207Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-03T08:44:06.065302Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-05-03T08:44:06.065319Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-05-03T08:44:06.065367Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-03T08:44:06.065400Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-03T08:44:06.065408Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-05-03T08:44:06.065465Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 HANDLE EvClientConnected 2025-05-03T08:44:06.066226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.066815Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-05-03T08:44:06.066827Z node 1 :TX_PROXY DEBUG: Actor# [1:7500137522677089963:2602] txid# 281474976715658 SEND to# [1:7500137522677089962:2333] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-05-03T08:44:06.067027Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:06.067027Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:06.067042Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:06.067043Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:06.075690Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090025:2662], Recipient [1:7500137522677090175:2337]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.075701Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090029:2666], Recipient [1:7500137522677090177:2339]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.075950Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090032:2669], Recipient [1:7500137522677090190:2345]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.075986Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090020:2657], Recipient [1:7500137522677090219:2350]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076029Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090028:2665], Recipient [1:7500137522677090224:2351]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076052Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090024:2661], Recipient [1:7500137522677090203:2348]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076100Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090017:2654], Recipient [1:7500137522677090178:2340]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076162Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090031:2668], Recipient [1:7500137522677090201:2347]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076184Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090023:2660], Recipient [1:7500137522677090197:2346]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076251Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090030:2667], Recipient [1:7500137522677090179:2341]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076260Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090022:2659], Recipient [1:7500137522677090186:2342]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076378Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090018:2655], Recipient [1:7500137522677090188:2343]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076404Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090019:2656], Recipient [1:7500137522677090189:2344]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076436Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, 
Sender [1:7500137522677090027:2664], Recipient [1:7500137522677090218:2349]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076463Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090026:2663], Recipient [1:7500137522677090176:2338]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.076530Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7500137522677090021:2658], Recipient [1:7500137522677090174:2336]: NKikimr::TEvTablet::TEvBoot 2025-05-03T08:44:06.079357Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137522677090031:2668], Recipient [1:7500137522677090201:2347]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:44:06.079382Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137522677090025:2662], Recipient [1:7500137522677090175:2337]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:44:06.079479Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7500137522677090201:2347] 2025-05-03T08:44:06.079495Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037899 actor [1:7500137522677090175:2337] 2025-05-03T08:44:06.079605Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-05-03T08:44:06.079618Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-05-03T08:44:06.080661Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:7500137522677090025:2662], Recipient [1:7500137522677090175:2337]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-05-03T08:44:06.080675Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7500137522677090030:2667], Recipient [1:7500137522677090179:2341]: NKikimr::TEvTablet::TEvRestored 2025-05-03T08:44:06.080684Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:7500137522677090031:2668], Recipient [1:7500137522677090201:2347]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025 ... 
79] at 72075186224037897 on unit ReadTableScan 2025-05-03T08:44:09.533976Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Continue 2025-05-03T08:44:09.533978Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-05-03T08:44:09.533979Z node 10 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037897 2025-05-03T08:44:09.533980Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-05-03T08:44:09.533981Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-05-03T08:44:09.533993Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-05-03T08:44:09.534021Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7500137532731723519:2163], Recipient [10:7500137532731721833:2344]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-05-03T08:44:09.534023Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-05-03T08:44:09.534023Z node 10 :READ_TABLE_API DEBUG: [10:7500137532731723423:2394] Adding quota request to queue ShardId: 0, TxId: 281474976715678 2025-05-03T08:44:09.534024Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-05-03T08:44:09.534031Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-05-03T08:44:09.534037Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-05-03T08:44:09.534038Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715679, at: 72075186224037897 2025-05-03T08:44:09.534045Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7500137532731721833:2344], Recipient [10:7500137532731721833:2344]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-05-03T08:44:09.534047Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-05-03T08:44:09.534050Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-05-03T08:44:09.534052Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-05-03T08:44:09.534054Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715679] at 72075186224037897 for ReadTableScan 2025-05-03T08:44:09.534055Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit ReadTableScan 2025-05-03T08:44:09.534057Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715679] at 72075186224037897 error: , IsFatalError: 0 2025-05-03T08:44:09.534059Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-05-03T08:44:09.534065Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit ReadTableScan 2025-05-03T08:44:09.534067Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit FinishPropose 2025-05-03T08:44:09.534068Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-05-03T08:44:09.534070Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is DelayComplete 
2025-05-03T08:44:09.534072Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit FinishPropose 2025-05-03T08:44:09.534073Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit CompletedOperations 2025-05-03T08:44:09.534075Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit CompletedOperations 2025-05-03T08:44:09.534077Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-05-03T08:44:09.534096Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit CompletedOperations 2025-05-03T08:44:09.534097Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715679] at 72075186224037897 has finished 2025-05-03T08:44:09.534099Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-03T08:44:09.534100Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-05-03T08:44:09.534101Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-05-03T08:44:09.534102Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-05-03T08:44:09.534106Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-05-03T08:44:09.534107Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-05-03T08:44:09.534110Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715679 at tablet 72075186224037897 send to client, exec latency: 4 ms, propose latency: 4 ms, status: COMPLETE 2025-05-03T08:44:09.534121Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-05-03T08:44:09.534209Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721811:2337]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534241Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721831:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534257Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721817:2340]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534270Z node 10 :READ_TABLE_API NOTICE: [10:7500137532731723423:2394] Finish grpc stream, status: 400000 2025-05-03T08:44:09.534273Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721830:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534273Z node 10 :READ_TABLE_API DEBUG: [10:7500137532731723423:2394] Send zero quota to Shard 0, TxId 281474976715678 2025-05-03T08:44:09.534276Z node 10 :READ_TABLE_API DEBUG: [10:7500137532731723423:2394] Send zero quota to Shard 
0, TxId 281474976715678 2025-05-03T08:44:09.534277Z node 10 :READ_TABLE_API DEBUG: [10:7500137532731723423:2394] Send zero quota to Shard 0, TxId 281474976715678 2025-05-03T08:44:09.534279Z node 10 :READ_TABLE_API DEBUG: [10:7500137532731723423:2394] Send zero quota to Shard 0, TxId 281474976715678 2025-05-03T08:44:09.534292Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721813:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534307Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721832:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534324Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721812:2338]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534339Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721810:2336]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534340Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721837:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.534362Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7500137532731723424:2394], Recipient [10:7500137532731721833:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1746261849577 TxId: 281474976715678 2025-05-03T08:44:09.541918Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6dae00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.541992Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6ec100] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542024Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6dbd00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542040Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6dfe00] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542050Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6e0d00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542069Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6d0e00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542093Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6d1d00] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542117Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6d0400] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542118Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6e9e00] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542143Z node 10 :GRPC_SERVER 
DEBUG: [0x549ebf6cdc00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542143Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6e4400] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542171Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6d8100] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542172Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6e6200] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542197Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6d9f00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542198Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6dc200] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542220Z node 10 :GRPC_SERVER DEBUG: [0x549ebeec8980] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-03T08:44:09.542223Z node 10 :GRPC_SERVER DEBUG: [0x549ebf6e3000] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::ZeroRows >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-05-03T08:44:06.216932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137519322587600:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.216949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000093/r3tmp/tmpuk2POn/pdisk_1.dat 2025-05-03T08:44:06.264045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25055, node 1 2025-05-03T08:44:06.283312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.283323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.283324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.283364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27220 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.303970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.317599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.317624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.319102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.325761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.028649Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137525374026690:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.028687Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000093/r3tmp/tmpAa06qJ/pdisk_1.dat 2025-05-03T08:44:07.043234Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8575, node 4 2025-05-03T08:44:07.072038Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.072050Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.072052Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.072096Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1571 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.129221Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.129280Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.130730Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.131328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.846898Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137523832433498:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.846927Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000093/r3tmp/tmpd3WY2S/pdisk_1.dat 2025-05-03T08:44:07.870735Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11232, node 7 2025-05-03T08:44:07.889734Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.889746Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.889748Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.889789Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19679 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.947362Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.947400Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.948908Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.950488Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-05-03T08:44:08.696616Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137531199443574:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.696688Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000093/r3tmp/tmpl0O0Y2/pdisk_1.dat 2025-05-03T08:44:08.713966Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2051, node 10 2025-05-03T08:44:08.728796Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.728808Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.728810Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.728847Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:08.796779Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.796810Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.798225Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.799819Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-05-03T08:44:09.561207Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137532113797924:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:09.561232Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000093/r3tmp/tmpVP1eK8/pdisk_1.dat 2025-05-03T08:44:09.577968Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7948, node 13 2025-05-03T08:44:09.596613Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:09.596626Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:09.596629Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:09.596690Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:09.661603Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:09.661637Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:09.663157Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:09.664664Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-05-03T08:44:04.677302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137512788897239:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.677319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bb/r3tmp/tmpbscDQU/pdisk_1.dat 2025-05-03T08:44:04.730966Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20993, node 1 2025-05-03T08:44:04.748801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.748821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.748823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.748874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.773270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.777653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.777677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.779056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.057546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137517083865474:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.057577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.110188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:05.173800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137517083865639:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.173821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.173838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137517083865644:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:05.174474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:05.183214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137517083865646:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-05-03T08:44:05.254706Z node 1 :TX_PROXY ERROR: Actor# [1:7500137517083865715:2766] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:05.260527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jtan28shavmtr39w78kw5sv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBkNDQ3NjYtNThiNDc1NzktODhiODA5NmYtMmFlNjI5ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.275026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jtan28shavmtr39w78kw5sv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk3MDYwM2QtYzY0YjI0N2QtZjZlNmRlZGEtZDMyMWFkYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.281124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jtan28shavmtr39w78kw5sv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzljNDIwZWEtYWJjNGE1YTItYjkwMmFkZmItN2M0NjdjNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.297455Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jtan28shavmtr39w78kw5sv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcxMzA3NDEtMWJiYWFiODYtOWRjMDFhNy0xN2Q1NDdkMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:05.922875Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137517846974077:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.922899Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bb/r3tmp/tmpazIByV/pdisk_1.dat 2025-05-03T08:44:05.939823Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22000, node 4 2025-05-03T08:44:05.971785Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.971799Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.971801Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.971845Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17199 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.023483Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.023517Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.024864Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.026080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.028719Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:44:06.233884Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137522141942302:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.233903Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.241128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.300299Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137522141942484:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.300317Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137522141942489:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.300322Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.300925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:06.309456Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137522141942491:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { Disconnected 2025-05-03T08:44:07.080205Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.097282Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.098576Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.266147Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:6908 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. This is a fatal and unrecoverable error. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation 2025-05-03T08:44:07.833449Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137525855319332:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.833488Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bb/r3tmp/tmpiwtsAp/pdisk_1.dat 2025-05-03T08:44:07.848263Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27540, node 10 2025-05-03T08:44:07.878109Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.878122Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.878124Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.878162Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.934238Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.934268Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.937528Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.937688Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:08.153530Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-05-03T08:44:08.891042Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137531367411995:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.891072Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bb/r3tmp/tmp8SMH85/pdisk_1.dat 2025-05-03T08:44:08.907585Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61597, node 13 2025-05-03T08:44:08.930661Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.930674Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.930676Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.930740Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-05-03T08:44:08.994095Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.994129Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.994962Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.995861Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:09.175795Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-05-03T08:44:05.898151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137516726505751:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.898214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a2/r3tmp/tmpwBOTm6/pdisk_1.dat 2025-05-03T08:44:05.954146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17969, node 1 2025-05-03T08:44:05.972360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.972373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.972374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.972407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.998220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.998252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.999701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.030099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:06.037348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:44:06.044598Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:42752 Call 2025-05-03T08:44:06.046410Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:42752 2025-05-03T08:44:06.221659Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:42752 Call Call 2025-05-03T08:44:06.226970Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:42752 2025-05-03T08:44:06.228542Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:42762 2025-05-03T08:44:06.228916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.735671Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137523110552729:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.735693Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a2/r3tmp/tmpIN177h/pdisk_1.dat 2025-05-03T08:44:06.752397Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1201, node 4 2025-05-03T08:44:06.769416Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.769436Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.769438Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.769477Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.835855Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.835882Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.837407Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.838924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.594210Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137526370308116:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.594228Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a2/r3tmp/tmpQIIvYt/pdisk_1.dat 2025-05-03T08:44:07.605129Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9583, node 7 2025-05-03T08:44:07.619748Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.619757Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.619759Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.619812Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:07.694553Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.694591Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.696103Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.697604Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:07.872158Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.872421Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:07.872435Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.872990Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-05-03T08:44:07.929419Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261847974, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:07.936265Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-03T08:44:07.937850Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.938034Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:07.938593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2025-05-03T08:44:07.946665Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261847995, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:07.949094Z node 7 :FLAT_TX_SCHEMESHARD NOTICE ... 
4:08.250080Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-7, pathId: 0, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.250116Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715687:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:08.250428Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715687, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-7 2025-05-03T08:44:08.251862Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037894 not found 2025-05-03T08:44:08.252222Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:08.260821Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261848310, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:08.261766Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715687, done: 0, blocked: 1 2025-05-03T08:44:08.262674Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-05-03T08:44:08.264173Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-8, pathId: 0, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-05-03T08:44:08.264209Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715688:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:08.264688Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715688, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-8 2025-05-03T08:44:08.266174Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-05-03T08:44:08.266514Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:08.267621Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261848317, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:08.268181Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715688, done: 0, blocked: 1 2025-05-03T08:44:08.268898Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715688:0 2025-05-03T08:44:08.272390Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-05-03T08:44:08.272688Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:08.847615Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137529362205272:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.847640Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a2/r3tmp/tmpU6KMKT/pdisk_1.dat 
2025-05-03T08:44:08.861684Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15239, node 10 2025-05-03T08:44:08.876923Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.876939Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.876941Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.876983Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:08.948489Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.948521Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.949804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.951298Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:5284 2025-05-03T08:44:09.145375Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5284 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746261849248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-05-03T08:44:09.223625Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5284 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746261849248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-05-03T08:44:09.905653Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137533157869500:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:09.905690Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000a2/r3tmp/tmp6RgXGS/pdisk_1.dat 2025-05-03T08:44:09.922868Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1872, node 13 2025-05-03T08:44:09.934503Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:09.934515Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:09.934517Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:09.934555Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:10.006179Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.006206Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.007714Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:10.009329Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:10.202570Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.264402Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> YdbTableBulkUpsert::ZeroRows [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-05-03T08:44:04.049546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137513100360365:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.049611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c4/r3tmp/tmpGndgwZ/pdisk_1.dat 2025-05-03T08:44:04.102938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18245, node 1 2025-05-03T08:44:04.123413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.123428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.123430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.123473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26251 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:44:04.149748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.149787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.151189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.179871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:26251 2025-05-03T08:44:04.205533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.210504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:04.711620Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500137510870129909:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.711647Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:04.713425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.713449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.714668Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-05-03T08:44:04.714918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26251 2025-05-03T08:44:04.744176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26251 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746261845250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-05-03T08:44:05.229881Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-05-03T08:44:05.230079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:05.899898Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137516325166922:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.899920Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c4/r3tmp/tmp8vi04f/pdisk_1.dat 2025-05-03T08:44:05.912859Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28263, node 4 2025-05-03T08:44:05.934758Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.934772Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.934774Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.934835Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.000088Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.000123Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.001620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.003244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12746 2025-05-03T08:44:06.036092Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.039413Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:06.540548Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500137521425013420:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.540589Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:06.542218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.542241Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.543589Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-05-03T08:44:06.543820Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12746 2025-05-03T08:44:06.567006Z node 4 :TX_PROXY ERROR: Actor# [4:7500137520620135506:2851] txid# 281474976715660, issues: { message: "Partition ranges are not sorted at index 2" severity: 1 } 2025-05-03T08:44:06.567826Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2025-05-03T08:44:06.567916Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:06.868718Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-03T08:44:06.868805Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7500137521425013758:2304], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:44:07.151443Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137526343030687:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.151474Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c4/r3tmp/tmp4LHG7y/pdisk_1.dat 2025-05-03T08:44:07.165468Z node 7 :IMPORT WARN ... 1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746261848280 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... (TRUNCATED) 2025-05-03T08:44:08.261052Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-05-03T08:44:08.261189Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:08.809979Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137527816029736:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.810001Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c4/r3tmp/tmp61Pa7U/pdisk_1.dat 2025-05-03T08:44:08.822300Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20627, node 10 2025-05-03T08:44:08.841862Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:08.841876Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:08.841878Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:08.841940Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:08.910368Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:08.910399Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:08.911859Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:08.913780Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28273 2025-05-03T08:44:08.950728Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:08.955809Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:09.456886Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7500137532468522325:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:09.456984Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:09.458530Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:09.458556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:09.459845Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-05-03T08:44:09.460060Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28273 2025-05-03T08:44:09.485987Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:44:09.486096Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:09.740312Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7500137532468522653:2304], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-05-03T08:44:09.740344Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-03T08:44:10.071172Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137538188479579:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:10.071198Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c4/r3tmp/tmpOQ9CC3/pdisk_1.dat 2025-05-03T08:44:10.092087Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22167, node 13 2025-05-03T08:44:10.113832Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:10.113849Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:10.113852Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:10.113894Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:10.171856Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.171898Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.173466Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:10.181493Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8794 2025-05-03T08:44:10.212640Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:10.216164Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:10.717491Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7500137536775706300:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:10.717589Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:10.719546Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.719573Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.720913Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-05-03T08:44:10.721170Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8794 2025-05-03T08:44:10.751905Z node 13 :TX_PROXY ERROR: Actor# [13:7500137538188480871:2855] txid# 281474976715660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-05-03T08:44:10.753055Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-05-03T08:44:10.753182Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:11.014856Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-05-03T08:44:11.015193Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7500137541070673937:2304], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::ZeroRows [GOOD] Test command err: 2025-05-03T08:44:05.675836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137518479095110:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.675854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ae/r3tmp/tmpFqpDnw/pdisk_1.dat 2025-05-03T08:44:05.734629Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64195, node 1 2025-05-03T08:44:05.748185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.748198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.748200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.748239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.775954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.775982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.777466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.811408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:05.978547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TIMEOUT CLIENT_DEADLINE_EXCEEDED 2025-05-03T08:44:06.048451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137522774065295:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.048457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137522774065286:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.048470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:06.049268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:06.052193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137522774065300:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:06.141463Z node 1 :TX_PROXY ERROR: Actor# [1:7500137522774065373:4147] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:06.206079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan2a103p10h58p855d04kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVkNGQyNTUtYzBiZmYzOGEtY2I5OWJhYzYtMWMwYzk3Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:06.726040Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137522479462492:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.726077Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ae/r3tmp/tmpgBYkKJ/pdisk_1.dat 2025-05-03T08:44:06.746052Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21622, node 4 2025-05-03T08:44:06.765403Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.765429Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.765431Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.765481Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:06.826616Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.826647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.828140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.829743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.020707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.580076Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137523955184281:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.580099Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ae/r3tmp/tmprwboxW/pdisk_1.dat 2025-05-03T08:44:07.599593Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22294, node 7 2025-05-03T08:44:07.614399Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:07.614413Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:07.614415Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:07.614460Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:07.680613Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.680648Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.682181Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.683701Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:07.869881Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:09.657161Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137535279138991:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:09.657334Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ae/r3tmp/tmpWkNXU9/pdisk_1.dat 2025-05-03T08:44:09.705770Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8840, node 10 2025-05-03T08:44:09.731381Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:09.731395Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:09.731398Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:09.731450Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:09.757800Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:09.757833Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:09.759304Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:09.788349Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:09.961357Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 1 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 2 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 4 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 8 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 16 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 32 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 64 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 128 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 256 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 512 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 1024 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 2048 usec 2025-05-03T08:44:10.720595Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137539458530259:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:10.720617Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ae/r3tmp/tmpg5B1Zv/pdisk_1.dat 2025-05-03T08:44:10.734413Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19125, node 13 2025-05-03T08:44:10.754358Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:10.754370Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:10.754385Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:10.754428Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:10.821302Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.821341Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.822854Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:10.824445Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:11.004040Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> ttl_unavailable_s3.py::TestUnavailableS3::test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: 2025-05-03T08:44:05.991903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137516685684525:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.991921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00009c/r3tmp/tmpqjs7HC/pdisk_1.dat 2025-05-03T08:44:06.046677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7111, node 1 2025-05-03T08:44:06.072261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.072272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.072274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.072307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-03T08:44:06.092247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.092282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.093789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63644 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:06.110706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:06.802539Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137522429476306:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:06.802558Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00009c/r3tmp/tmpGkzPpb/pdisk_1.dat 2025-05-03T08:44:06.815540Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14505, node 4 2025-05-03T08:44:06.829605Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:06.829617Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:06.829618Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:06.829669Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:06.903222Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:06.903257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:06.904721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:06.907890Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23086 2025-05-03T08:44:06.932278Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.932358Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:06.932367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.932858Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-05-03T08:44:06.934600Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261846980, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:06.934994Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-03T08:44:06.935008Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-05-03T08:44:06.935135Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-05-03T08:44:06.935947Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.936066Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:06.936074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:06.936372Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-05-03T08:44:07.436985Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500137525177344200:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.437020Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:07.439061Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:07.439092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:07.439385Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ObjectStorage, PostgreSQL 2025-05-03T08:44:07.440424Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-05-03T08:44:07.440665Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:07.455867Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.456185Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:07.456190Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:44:07.456240Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-05-03T08:44:07.456267Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:07.456271Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-05-03T08:44:07.456382Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:07.457159Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-05-03T08:44:07.870205Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261847918, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:07.870839Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-05-03T08:44:07.870875Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-05-03T08:44:07.871002Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify 
& remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-05-03T08:44:07.928422Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261847970, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:07.936539Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 20 ... hard: 72057594046644480 2025-05-03T08:44:09.640154Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-05-03T08:44:09.640182Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-05-03T08:44:09.640186Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2025-05-03T08:44:09.642909Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-05-03T08:44:09.643017Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:10.337428Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137539661600304:2258];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:10.337458Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00009c/r3tmp/tmpaTRN2M/pdisk_1.dat 2025-05-03T08:44:10.358328Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13000, node 10 2025-05-03T08:44:10.373200Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:10.373216Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:10.373218Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:10.373254Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:10.438582Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.438614Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.440158Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:10.446974Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:62614 2025-05-03T08:44:10.474094Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.474205Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:10.474218Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.474726Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-05-03T08:44:10.476699Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261850522, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:10.477172Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-05-03T08:44:10.477195Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-05-03T08:44:10.477353Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-05-03T08:44:10.478206Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.478347Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:10.478356Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.478725Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-05-03T08:44:10.979556Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7500137536649116858:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:10.979579Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:44:10.982039Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:10.982075Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:10.982952Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-05-03T08:44:10.983202Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:10.990927Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.991282Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:10.991302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-03T08:44:10.991764Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-05-03T08:44:11.354158Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261851397, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:11.354730Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-05-03T08:44:11.354771Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-05-03T08:44:11.354905Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-05-03T08:44:11.412010Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261851460, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:11.413775Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-05-03T08:44:11.416821Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-05-03T08:44:11.416940Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:12.012839Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7500137545142424367:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:12.012944Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00009c/r3tmp/tmpuzoUHB/pdisk_1.dat 
2025-05-03T08:44:12.029917Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2181, node 13 2025-05-03T08:44:12.044790Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:12.044805Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:12.044807Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:12.044861Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26280 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:12.116884Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:12.116915Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:12.117866Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-03T08:44:12.118211Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:44:12.307322Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [GOOD] >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places [GOOD] >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] >> YdbYqlClient::RenameTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-05-03T08:44:04.359975Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137512740944631:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.360033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bf/r3tmp/tmpiGMrHl/pdisk_1.dat 2025-05-03T08:44:04.414269Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27247, node 1 2025-05-03T08:44:04.434553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.434564Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.434566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.434595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.455713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:04.460486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.460515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.461957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-05-03T08:44:09.360249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7500137512740944631:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:09.360288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-05-03T08:44:09.754951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137534215782207:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:09.754958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137534215782198:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:09.754971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:09.755659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:09.759386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137534215782212:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-05-03T08:44:09.820232Z node 1 :TX_PROXY ERROR: Actor# [1:7500137534215782287:2691] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-05-03T08:44:16.115246Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137564756212249:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:16.115556Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000bf/r3tmp/tmptFYiR1/pdisk_1.dat 2025-05-03T08:44:16.133703Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5176, node 4 2025-05-03T08:44:16.156169Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:16.156180Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:16.156182Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:16.156223Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2047 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:16.214079Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:16.214121Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:16.215479Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:16.219854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:16.462652Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137564756213197:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:16.462688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:16.465656Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:16.534243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137564756213362:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:16.534263Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:16.534280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137564756213367:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:16.534915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:16.538322Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137564756213369:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:16.615693Z node 4 :TX_PROXY ERROR: Actor# [4:7500137564756213444:2776] txid# 281474976715660, ... 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33312" 2025-05-03T08:44:20.149380Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-03T08:44:20.149392Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 TEvNavigateKeySet requested from SchemeCache 2025-05-03T08:44:20.149466Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-03T08:44:20.149489Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-03T08:44:20.149500Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-05-03T08:44:20.149527Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 HANDLE EvClientConnected 2025-05-03T08:44:20.149572Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-03T08:44:20.149604Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:20.149926Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:20.150041Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-05-03T08:44:20.150067Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-05-03T08:44:20.150073Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423916:3521] txid# 281474976715672 SEND to# [13:7500137582798423914:2388] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-05-03T08:44:20.150258Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.150272Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.150283Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.150290Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.153993Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261860203, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:20.154741Z node 13 
:FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2025-05-03T08:44:20.155416Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.155441Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.155444Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.155455Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.155658Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-05-03T08:44:20.156988Z node 13 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jtan2qsw47c2psh3drc30fzh, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33312, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-05-03T08:44:20.157104Z node 13 :TX_PROXY DEBUG: actor# [13:7500137578503454741:2114] Handle TEvProposeTransaction 2025-05-03T08:44:20.157118Z node 13 :TX_PROXY DEBUG: actor# [13:7500137578503454741:2114] TxId# 281474976715673 ProcessProposeTransaction 2025-05-03T08:44:20.157127Z node 13 :TX_PROXY DEBUG: actor# [13:7500137578503454741:2114] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [13:7500137582798423998:3598] 2025-05-03T08:44:20.157832Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33312" 2025-05-03T08:44:20.157848Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-03T08:44:20.157860Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-05-03T08:44:20.157953Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-03T08:44:20.157979Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-05-03T08:44:20.157985Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-05-03T08:44:20.158019Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 HANDLE EvClientConnected 2025-05-03T08:44:20.158051Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-05-03T08:44:20.158085Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:44:20.158602Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-05-03T08:44:20.158658Z node 13 :TX_PROXY DEBUG: 
Actor# [13:7500137582798423998:3598] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-05-03T08:44:20.158672Z node 13 :TX_PROXY DEBUG: Actor# [13:7500137582798423998:3598] txid# 281474976715673 SEND to# [13:7500137582798423996:2391] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-05-03T08:44:20.158817Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.158836Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.158839Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.158853Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.159503Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037890 not found 2025-05-03T08:44:20.159839Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-03T08:44:20.168230Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746261860217, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-03T08:44:20.169141Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-05-03T08:44:20.169858Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.169886Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.169956Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-05-03T08:44:20.169969Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-05-03T08:44:20.170540Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-05-03T08:44:20.171722Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e3a00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171796Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e1c00] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171849Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e3500] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171852Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e8000] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171886Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6ea800] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171904Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e8500] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171933Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e3f00] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.171998Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6e9400] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172016Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6ea300] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172054Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6d5900] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172055Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6c1e00] 
received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172083Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6c3200] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172089Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6d1d00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172122Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6d7200] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172132Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6dae00] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172154Z node 13 :GRPC_SERVER DEBUG: [0x15ee3ecd4480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.172183Z node 13 :GRPC_SERVER DEBUG: [0x15ee3f6eb200] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-05-03T08:44:20.174668Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found 2025-05-03T08:44:20.175124Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> YdbOlapStore::LogTsRangeDescending [GOOD] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-05-03T08:43:56.751207Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137478989018373:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.751438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f4/r3tmp/tmpG6pbtu/pdisk_1.dat 2025-05-03T08:43:56.802650Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63636, node 1 2025-05-03T08:43:56.824175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.824184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.824185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.824213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5588 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.851285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.851313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.851743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-03T08:43:56.852585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-03T08:43:57.060698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483283986618:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.060728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.100454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.167993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-03T08:43:57.172853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483283986813:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.172873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137483283986818:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.172879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:57.173507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-05-03T08:43:57.177055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500137483283986820:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-05-03T08:43:57.251821Z node 1 :TX_PROXY ERROR: Actor# [1:7500137483283986893:2793] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:57.256435Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7500137483283986914:2803], for# test_user@builtin, access# DescribeSchema 2025-05-03T08:43:57.256454Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7500137483283986914:2803], for# test_user@builtin, access# DescribeSchema 2025-05-03T08:43:57.258321Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500137483283986911:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-05-03T08:43:57.258414Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzE0MTkzNzAtYmNhOTFhOGItYTFlYjIwZWItZDVlNzRkZjE=, ActorId: [1:7500137483283986809:2351], ActorState: ExecuteState, TraceId: 01jtan21bm30vrwzf29y3dmznf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-05-03T08:43:57.804620Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137481844873489:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.804677Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f4/r3tmp/tmphaNDuO/pdisk_1.dat 2025-05-03T08:43:57.818178Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11597, node 4 2025-05-03T08:43:57.831043Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.831058Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.831060Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.831112Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.905056Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.905089Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.906531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.907692Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:58.111568Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137486139841732:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.111611Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.112807Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:58.173609Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137486139841901:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.173627Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137486139841906:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.173629Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:58.174267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:58.182540Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7500137486139841908:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:58.283380Z node 4 :TX_PROXY ERROR: Actor# [4:7500137486139841983:2781] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:58.295182Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan22ax0pxtga8jeg3400td, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzBiMmUwMGYtMzMwOWEyYTktODU5Y2E3ZWYtZjJlZWJkY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:58.311289Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan22ev210dvdq7g86v31cn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWUzM2YwZTEtN2YwOWQ2NmEtZWJjOTkwYS1kMzBjM2FkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:58.847632Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137486404385750:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.847654Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f4/r3tmp/tmp6MbNoz/pdisk_1.dat 2025-05-03T08:43:58.860873Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22730, node 7 2025-05-03T08:43:58.874949Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:58.874961Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:58.874963Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:58.874993Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:58.947937Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:58.947962Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:58.949643Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:58.951437Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-05-03T08:44:01.306125Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137499289288644:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.306153Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.306269Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137499289288656:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:01.306994Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-05-03T08:44:01.314986Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137499289288658:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-05-03T08:44:01.387161Z node 7 :TX_PROXY ERROR: Actor# [7:7500137499289288738:2678] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-05-03T08:44:03.925604Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137506620830974:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.925646Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f4/r3tmp/tmpIteDk7/pdisk_1.dat 2025-05-03T08:44:03.946537Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28511, node 10 2025-05-03T08:44:03.966348Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.966360Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.966362Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.966389Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27821 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.026463Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.026495Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.028001Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.029096Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2025-05-03T08:44:08.926739Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7500137506620830974:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:08.926770Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2025-05-03T08:44:18.943430Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-03T08:44:18.943446Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2025-05-03T08:43:59.702653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137492147981212:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:59.702672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000ea/r3tmp/tmpRDdBJp/pdisk_1.dat 2025-05-03T08:43:59.759670Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28931, node 1 2025-05-03T08:43:59.778279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:59.778293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:59.778295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:59.778337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:59.803187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:59.803216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:59.804651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:59.837841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
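The "Previous query attempt was finished with unsuccessful status ...: Sending retry attempt N of M" lines further up appear to come from a query retry wrapper used by these tests: in this run, transient statuses such as OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION and SESSION_BUSY are retried up to 5 times (3 in a later case), while NOT_FOUND, UNDETERMINED and TRANSPORT_UNAVAILABLE get a single extra attempt. The following self-contained C++ sketch reproduces that bounded, status-dependent retry loop; the status names mirror the log, but RunWithRetries and its attempt limits are illustrative and not the actual YDB SDK retry policy.

#include <functional>
#include <iostream>
#include <string>

// Status names as they appear in the log; the enum itself is illustrative.
enum class EStatus {
    SUCCESS, OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE,
    BAD_SESSION, SESSION_BUSY, NOT_FOUND, UNDETERMINED, TRANSPORT_UNAVAILABLE
};

const char* ToString(EStatus s) {
    switch (s) {
        case EStatus::SUCCESS: return "SUCCESS";
        case EStatus::OVERLOADED: return "OVERLOADED";
        case EStatus::CLIENT_RESOURCE_EXHAUSTED: return "CLIENT_RESOURCE_EXHAUSTED";
        case EStatus::UNAVAILABLE: return "UNAVAILABLE";
        case EStatus::BAD_SESSION: return "BAD_SESSION";
        case EStatus::SESSION_BUSY: return "SESSION_BUSY";
        case EStatus::NOT_FOUND: return "NOT_FOUND";
        case EStatus::UNDETERMINED: return "UNDETERMINED";
        case EStatus::TRANSPORT_UNAVAILABLE: return "TRANSPORT_UNAVAILABLE";
    }
    return "UNKNOWN";
}

// Illustrative attempt budget: transient statuses get several retries,
// a few others get one extra attempt, anything else is not retried at all.
int MaxAttempts(EStatus s) {
    switch (s) {
        case EStatus::OVERLOADED:
        case EStatus::CLIENT_RESOURCE_EXHAUSTED:
        case EStatus::UNAVAILABLE:
        case EStatus::BAD_SESSION:
        case EStatus::SESSION_BUSY:
            return 5;
        case EStatus::NOT_FOUND:
        case EStatus::UNDETERMINED:
        case EStatus::TRANSPORT_UNAVAILABLE:
            return 1;
        default:
            return 0;
    }
}

EStatus RunWithRetries(const std::function<EStatus()>& query) {
    EStatus st = query();
    int attempt = 0;
    while (st != EStatus::SUCCESS && attempt < MaxAttempts(st)) {
        ++attempt;
        std::cout << "Previous query attempt was finished with unsuccessful status "
                  << ToString(st) << ": Sending retry attempt " << attempt
                  << " of " << MaxAttempts(st) << "\n";
        st = query();   // a real client would also back off before retrying
    }
    return st;
}

int main() {
    int calls = 0;
    // Toy query: fails with OVERLOADED twice, then succeeds.
    EStatus st = RunWithRetries([&] {
        return ++calls < 3 ? EStatus::OVERLOADED : EStatus::SUCCESS;
    });
    std::cout << "final status: " << ToString(st) << "\n";
}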
TClient is connected to server localhost:3913 2025-05-03T08:43:59.881235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-03T08:43:59.881313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:59.881470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-03T08:43:59.881490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-03T08:43:59.881500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-03T08:43:59.881505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-03T08:43:59.881521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-03T08:43:59.881527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-05-03T08:43:59.881616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-05-03T08:43:59.881992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2025-05-03T08:43:59.882074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-03T08:43:59.882085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:59.882107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-03T08:43:59.882120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-05-03T08:43:59.882793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: 
Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-03T08:43:59.882842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-05-03T08:43:59.882916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-03T08:43:59.882926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-03T08:43:59.882956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-03T08:43:59.882977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-03T08:43:59.882985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137492147981802:2381], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-03T08:43:59.883005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500137492147981802:2381], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-03T08:43:59.883018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:59.883023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 waiting... 2025-05-03T08:43:59.883195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: 
"hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-03T08:43:59.883278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } Bi ... d: default}. Compute actor has finished execution: [28:7500137585814802886:3137] 2025-05-03T08:44:21.115937Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137585814802890:3140], CA [28:7500137585814802903:3151], CA [28:7500137585814802891:3141], CA [28:7500137585814802887:3138], CA [28:7500137585814802888:3139], CA [28:7500137585814802892:3142], 2025-05-03T08:44:21.115954Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137585814802887:3138], task: 52, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 175 Tasks { TaskId: 52 CpuTimeUs: 92 FinishTimeMs: 1746261861115 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 14 BuildCpuTimeUs: 78 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261861105 UpdateTimeMs: 1746261861115 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:21.115957Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 53. Tasks execution finished 2025-05-03T08:44:21.115958Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. 
Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137585814802887:3138] 2025-05-03T08:44:21.115958Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137585814802888:3139], TxId: 281474976715670, task: 53. Ctx: { TraceId : 01jtan2rnz5fj7cdvzgbzspm3t. SessionId : ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:21.115962Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137585814802890:3140], CA [28:7500137585814802903:3151], CA [28:7500137585814802891:3141], CA [28:7500137585814802888:3139], CA [28:7500137585814802892:3142], 2025-05-03T08:44:21.115964Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 53. pass away 2025-05-03T08:44:21.115969Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=53;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:21.115969Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, ActorId: [28:7500137585814802801:3081], ActorState: ExecuteState, TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Forwarded TEvStreamData to [28:7500137585814802799:3080] 2025-05-03T08:44:21.115984Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137585814802888:3139], task: 53, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 166 Tasks { TaskId: 53 CpuTimeUs: 83 FinishTimeMs: 1746261861115 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 15 BuildCpuTimeUs: 68 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261861106 UpdateTimeMs: 1746261861115 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:21.115987Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137585814802888:3139] 2025-05-03T08:44:21.115991Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137585814802890:3140], CA [28:7500137585814802903:3151], CA [28:7500137585814802891:3141], CA [28:7500137585814802892:3142], 2025-05-03T08:44:21.116001Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 54. Tasks execution finished 2025-05-03T08:44:21.116002Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137585814802890:3140], TxId: 281474976715670, task: 54. Ctx: { CustomerSuppliedId : . TraceId : 01jtan2rnz5fj7cdvzgbzspm3t. SessionId : ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:21.116011Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 54. pass away 2025-05-03T08:44:21.116017Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=54;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:21.116042Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 55. Tasks execution finished 2025-05-03T08:44:21.116044Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137585814802891:3141], TxId: 281474976715670, task: 55. Ctx: { TraceId : 01jtan2rnz5fj7cdvzgbzspm3t. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:21.116050Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 55. pass away 2025-05-03T08:44:21.116055Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=55;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:21.116089Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 56. Tasks execution finished 2025-05-03T08:44:21.116091Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7500137585814802892:3142], TxId: 281474976715670, task: 56. Ctx: { CustomerSuppliedId : . TraceId : 01jtan2rnz5fj7cdvzgbzspm3t. SessionId : ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:21.116096Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 56. pass away 2025-05-03T08:44:21.116103Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=56;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:21.116183Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137585814802890:3140], task: 54, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 184 Tasks { TaskId: 54 CpuTimeUs: 85 FinishTimeMs: 1746261861115 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 18 BuildCpuTimeUs: 67 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261861104 UpdateTimeMs: 1746261861116 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:21.116187Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137585814802890:3140] 2025-05-03T08:44:21.116192Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137585814802903:3151], CA [28:7500137585814802891:3141], CA [28:7500137585814802892:3142], 2025-05-03T08:44:21.116207Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137585814802891:3141], task: 55, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 188 Tasks { TaskId: 55 CpuTimeUs: 88 FinishTimeMs: 1746261861116 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 15 BuildCpuTimeUs: 73 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261861104 UpdateTimeMs: 1746261861116 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:21.116210Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7500137585814802891:3141] 2025-05-03T08:44:21.116214Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [28:7500137585814802903:3151], CA [28:7500137585814802892:3142], 2025-05-03T08:44:21.116228Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7500137585814802822:3081] TxId: 281474976715670. 
Ctx: { TraceId: 01jtan2rnz5fj7cdvzgbzspm3t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=OTk4MTU5YTUtYjJlNGFmYjYtOTIyOTYyNjAtNzg3NjU1ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7500137585814802892:3142], task: 56, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 175 Tasks { TaskId: 56 CpuTimeUs: 86 FinishTimeMs: 1746261861116 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 17 BuildCpuTimeUs: 69 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-26z4vhl2yy" NodeId: 28 CreateTimeMs: 1746261861104 UpdateTimeMs: 1746261861116 } MaxMemoryUsage: 1048576 } >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-05-03T08:43:54.369301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137468285233891:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:54.369333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000107/r3tmp/tmpWB7kkj/pdisk_1.dat 2025-05-03T08:43:54.427731Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16053, node 1 2025-05-03T08:43:54.446008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:54.446019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:54.446021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:54.446057Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29972 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:43:54.469711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:54.469746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-03T08:43:54.471214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:54.500855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:54.662511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137468285234836:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.662530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:54.698564Z node 1 :TX_PROXY ERROR: Actor# [1:7500137468285234858:2605] txid# 281474976715658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-05-03T08:43:55.218142Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137473967579267:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:55.218173Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000107/r3tmp/tmpthFCBq/pdisk_1.dat 2025-05-03T08:43:55.231727Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3709, node 4 2025-05-03T08:43:55.244980Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:55.244991Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:55.244993Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:55.245024Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:55.318595Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:55.318647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:55.319980Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:55.321624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:55.540952Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473967580226:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.540987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.544066Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473967580251:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.546090Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.549085Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:55.550239Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580305:2619] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550292Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580308:2620] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550382Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580322:2628] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550411Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580319:2625] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550428Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580317:2623] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550430Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580318:2624] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550446Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580321:2627] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.550462Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580320:2626] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" 
severity: 1 } 2025-05-03T08:43:55.550496Z node 4 :TX_PROXY ERROR: Actor# [4:7500137473967580315:2621] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } 2025-05-03T08:43:55.614684Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137473967580522:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:55.614716Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NO ... type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:55.688839Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jtan1ztyejhnkjxstf5e0adx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDNkYWU1OTMtZjViZTZkNGQtMjQ0OWIzYzYtN2M5NmRjMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:56.264182Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137478760317925:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.264200Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000107/r3tmp/tmpbWMLmf/pdisk_1.dat 2025-05-03T08:43:56.279785Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9250, node 7 2025-05-03T08:43:56.289498Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.289512Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.289514Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.289544Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.364611Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.364648Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.366049Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:56.367249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
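The burst of TX_PROXY errors above for /Root/Test ("path exists but creating right now ... state: EPathStateCreate") is what concurrent CREATE TABLE proposals for the same path look like: one proposal has already moved /Root/Test into a creating state, so the remaining proposals are rejected against that in-flight state rather than against a finished table. The short C++ sketch below models that first-writer-wins check; the path-state map and TryBeginCreate are a conceptual illustration, not schemeshard's actual implementation.

#include <iostream>
#include <map>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

enum class EPathState { NotExists, Creating, Ready };   // illustrative states

std::mutex g_lock;
std::map<std::string, EPathState> g_pathState;

// First caller flips the path to Creating and "wins"; everyone else sees the
// in-flight state and gets a "path exists but creating right now" style error.
bool TryBeginCreate(const std::string& path, std::string& error) {
    std::lock_guard<std::mutex> guard(g_lock);
    auto [it, inserted] = g_pathState.emplace(path, EPathState::Creating);
    if (inserted) {
        return true;
    }
    error = it->second == EPathState::Creating
        ? "path exists but creating right now (state: EPathStateCreate)"
        : "path exists (state: EPathStateNoChanges)";
    return false;
}

int main() {
    const std::string path = "/Root/Test";
    std::vector<std::thread> proposals;
    for (int i = 0; i < 9; ++i) {   // nine concurrent CREATE TABLE proposals, as in the log
        proposals.emplace_back([&, i] {
            std::string error;
            std::string msg;
            if (TryBeginCreate(path, error)) {
                msg = "proposal " + std::to_string(i) + ": accepted, creating " + path + "\n";
            } else {
                msg = "proposal " + std::to_string(i) + ": Check failed: path: '" + path +
                      "', error: " + error + "\n";
            }
            std::cout << msg;
        });
    }
    for (auto& t : proposals) {
        t.join();
    }
}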
2025-05-03T08:43:56.573352Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:43:56.634189Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137478760319076:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.634203Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7500137478760319065:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.634219Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:43:56.634705Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:43:56.642986Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7500137478760319079:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:43:56.706631Z node 7 :TX_PROXY ERROR: Actor# [7:7500137478760319154:2809] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:43:56.714838Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan20tsdyh7paakf82ae8m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NWVlMjliNjctNWJhNTNhYS01Y2I2OTE0MC05ODRmM2E1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:43:57.327374Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137482465218452:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.327395Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/000107/r3tmp/tmpOLEF1k/pdisk_1.dat 2025-05-03T08:43:57.339885Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2226, node 10 2025-05-03T08:43:57.353284Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.353297Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.353299Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.353337Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:43:57.428047Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.428076Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.429597Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.430639Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.660306Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T08:44:02.327740Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7500137482465218452:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.327770Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-03T08:44:12.336838Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-03T08:44:12.336850Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:44:22.787886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137589839402995:2511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:22.787886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7500137589839403006:2514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:22.787915Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:22.788752Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-05-03T08:44:22.793183Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7500137589839403009:2515], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-03T08:44:22.859869Z node 10 :TX_PROXY ERROR: Actor# [10:7500137589839403076:3135] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-03T08:44:22.872334Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jtan2tc3628rn5eqez2t34ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODllMmZhNjktMTQ1NzkwYzctNWZiNGNlMjgtMTAzZjhhOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:22.917989Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jtan2tewfd2rbrvk4qyfrvtw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODllMmZhNjktMTQ1NzkwYzctNWZiNGNlMjgtMTAzZjhhOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-05-03T08:43:56.110971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137478539000401:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.110991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmpHAz3aR/pdisk_1.dat 2025-05-03T08:43:56.170672Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9837, node 1 2025-05-03T08:43:56.191918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:56.191932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:56.191934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:56.191971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:43:56.211419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:56.211444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:56.212943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:56.245268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:56.266914Z node 1 :TICKET_PARSER DEBUG: Ticket 5D99DC2E4B688D80B526B36A56241AE52252E705C88EE296313406ADBE16D92D (ipv6:[::1]:54944) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:43:56.278456Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54960) has now valid token of root@builtin 2025-05-03T08:43:56.288960Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:56.288990Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:56.288994Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:56.289003Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:56.960502Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137479905245929:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:56.960525Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmpdK5rT2/pdisk_1.dat 2025-05-03T08:43:56.976976Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10657, node 4 2025-05-03T08:43:57.011180Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.011192Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.011193Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.011234Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63537 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.060866Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.060907Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.062385Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.064975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.086045Z node 4 :TICKET_PARSER DEBUG: Ticket 5D99DC2E4B688D80B526B36A56241AE52252E705C88EE296313406ADBE16D92D (ipv6:[::1]:52614) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:43:57.100439Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52630) has now valid token of root@builtin 2025-05-03T08:43:57.113584Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:57.113598Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:57.113600Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:57.113629Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:57.604067Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137481749771719:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:57.604162Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmprQHoL6/pdisk_1.dat 2025-05-03T08:43:57.618221Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9006, node 7 2025-05-03T08:43:57.640694Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:57.640705Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:57.640707Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:57.640747Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20899 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:57.704449Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:57.704477Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:57.705969Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:57.707538Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:43:57.733151Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:35908) has now valid token of root@builtin 2025-05-03T08:43:57.741221Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:43:57.741236Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:43:57.741240Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:43:57.741251Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:43:58.251013Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137488178630680:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:58.251036Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmpPPxy9l/pdisk_1.dat 2025-05-03T08:43:58.265344Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7860, node 10 2025-05-03T08:43:58.284068Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:58.284080Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:58.284081Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:58.284113Z node 10 :NET_CLASSIFIER ERROR: got bad distributa ... 
channel_credentials=0x62f5c173e50, grpc.internal.event_engine=0x62f5c34a040, grpc.internal.subchannel_pool=0x62f5c5d0bc0, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x62f7e121690, grpc.server_uri=dns:///localhost:20653, grpc.socket_mutator=0x62f7a342280} 2025-05-03T08:44:02.413467Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7500137504744754472:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.413560Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmphw2bGx/pdisk_1.dat 2025-05-03T08:44:02.429982Z node 25 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2187, node 25 2025-05-03T08:44:02.450002Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.450015Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.450018Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.450091Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.513638Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.513668Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:02.515089Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.517823Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
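The channel-argument dump at the top of this block (grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.server_uri=dns:///localhost:20653, ...) lists the gRPC core options the test client was created with. A rough Python sketch of passing equivalent options through grpcio is shown below; the endpoint and the bare ssl_channel_credentials() call are placeholders, not the test harness's actual wiring.

```python
import grpc

# Hypothetical endpoint; the tests bind an ephemeral localhost port
# (dns:///localhost:20653 in the dump above).
target = "localhost:20653"

# Channel arguments mirroring the values in the dump above.
options = [
    ("grpc.keepalive_permit_without_calls", 0),
    ("grpc.keepalive_time_ms", 1250),
    ("grpc.keepalive_timeout_ms", 10000),
    ("grpc.max_receive_message_length", 64000000),
    ("grpc.max_send_message_length", 64000000),
]

# The suite uses TLS (the handshake errors later in this block come from
# client-certificate verification); no-argument credentials are a placeholder.
credentials = grpc.ssl_channel_credentials()

channel = grpc.secure_channel(target, credentials, options=options)
```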
2025-05-03T08:44:07.413880Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7500137504744754472:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:07.413922Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0503 08:44:12.534170027 101097 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.541182475 84313 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.549238692 84312 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.555266352 84313 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.563130964 84312 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.569436738 101120 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.584360176 101122 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.590649247 101120 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.599056157 84313 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.604487588 84271 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.612271284 101198 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:12.618235205 84272 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
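The run of tls_process_client_certificate failures above is the server rejecting the expired client certificate, which is exactly what ServerWithCertVerification_ClientProvidesExpiredCert expects. As a side note, a minimal diagnostic sketch for checking a certificate's validity window ahead of time could look like the following; it assumes the third-party cryptography package and a hypothetical client.pem path, neither of which is part of the test itself.

```python
from datetime import datetime, timezone

from cryptography import x509

# Hypothetical path; the tests generate their own short-lived certificates.
with open("client.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())

# not_valid_after is a naive datetime in UTC in older cryptography releases.
not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)

if not_after < datetime.now(timezone.utc):
    print(f"client certificate expired at {not_after.isoformat()}")
else:
    print(f"client certificate valid until {not_after.isoformat()}")
```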
2025-05-03T08:44:13.172213Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7500137553097970545:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:13.172302Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000f8/r3tmp/tmpi0Y2Wn/pdisk_1.dat 2025-05-03T08:44:13.205955Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10644, node 28 2025-05-03T08:44:13.228436Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:13.228448Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:13.228450Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:13.228492Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:13.270382Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:13.270418Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:13.271997Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:13.275753Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:18.172501Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7500137553097970545:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:18.172529Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0503 08:44:23.294560606 102458 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.300847967 102457 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0503 08:44:23.309205028 102457 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.314552607 114493 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.322148093 102458 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.327884979 102458 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.334901261 114503 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.340166510 114503 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.346359773 114516 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.351739358 102441 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.358397624 114515 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0503 08:44:23.363513733 102442 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
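The DeprecationWarning above is raised by the bundled Python driver (contrib/python/ydb/py3/ydb/types.py:59) and already names the replacement. A minimal sketch of the migration it suggests, using a hypothetical timestamp value, is:

```python
from datetime import datetime, timezone

timestamp = 1746261870  # hypothetical epoch seconds

# Deprecated since Python 3.12: returns a naive datetime implicitly in UTC.
naive_utc = datetime.utcfromtimestamp(timestamp)

# Replacement suggested by the warning: an aware datetime in UTC
# (timezone.utc is the portable spelling of datetime.UTC).
aware_utc = datetime.fromtimestamp(timestamp, timezone.utc)

assert aware_utc.replace(tzinfo=None) == naive_utc
```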
|96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> unstable_connection.py::TestUnstableConnection::test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogCountByResource ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000175/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000175/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 102432 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 
2025-05-03T08:44:02.841930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137505273691317:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:02.842071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmp7kGBFD/pdisk_1.dat 2025-05-03T08:44:02.894681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21212, node 1 2025-05-03T08:44:02.915358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:02.915365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:02.915366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:02.915395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-05-03T08:44:02.941636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:02.941663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:02.948603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:02.968937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:02.993815Z node 1 :TICKET_PARSER DEBUG: Ticket 93273186BBF485053C3D7B60CB75DC32C13CD8FDB1B77F1839CFC2F8E90985FE (ipv6:[::1]:42384) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:44:03.010834Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:42386) has now valid token of root@builtin 2025-05-03T08:44:03.026206Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:03.026221Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:03.026224Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:03.026233Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:03.680608Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137507773829194:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.681505Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmpPn1kYm/pdisk_1.dat 2025-05-03T08:44:03.695683Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26262, node 4 2025-05-03T08:44:03.714271Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.714283Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.714285Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.714326Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.780325Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.780357Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:03.781788Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:03.782935Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:03.800698Z node 4 :TICKET_PARSER DEBUG: Ticket 93273186BBF485053C3D7B60CB75DC32C13CD8FDB1B77F1839CFC2F8E90985FE (ipv6:[::1]:40276) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:44:03.812337Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:40284) has now valid token of root@builtin 2025-05-03T08:44:03.823514Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:03.823529Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:03.823532Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:03.823543Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:04.374773Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500137513408960201:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.374799Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmplKjIT2/pdisk_1.dat 2025-05-03T08:44:04.391071Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18621, node 7 2025-05-03T08:44:04.407875Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.407892Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.407894Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.407946Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.475080Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.475127Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.476521Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.478742Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:04.498552Z node 7 :TICKET_PARSER DEBUG: Ticket 334B13352742629480C8F298152BC74F948350137245BFDBCF1757F1EE975CED (ipv6:[::1]:52338) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-05-03T08:44:04.509092Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52342) has now valid token of root@builtin 2025-05-03T08:44:04.518020Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:04.518051Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:04.518055Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:04.518067Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:05.055646Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137518040365234:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:05.055671Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmpgKmlsY/pdisk_1.dat 2025-05-03T08:44:05.068514Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23003, node 10 2025-05-03T08:44:05.090602Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.090617Z node 10 :NET_CLASSIFIER WARN: will ... schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:12.343877Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7500137524700786119:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:12.343921Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-03T08:44:17.473573Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:43594) has now valid token of root@builtin 2025-05-03T08:44:17.483073Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:17.483090Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:17.483093Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:17.483104Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:18.074641Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7500137571768842184:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:18.074666Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmpkN7rCX/pdisk_1.dat 2025-05-03T08:44:18.102442Z node 22 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18999, node 22 2025-05-03T08:44:18.123019Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-05-03T08:44:18.123038Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:18.123040Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:18.123087Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:18.175220Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:18.175252Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:18.177099Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:18.178399Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-03T08:44:23.074846Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7500137571768842184:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:23.074883Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-03T08:44:28.207129Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:41544) has now valid token of root@builtin 2025-05-03T08:44:28.216264Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:28.216288Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:28.216291Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:28.216304Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:28.790604Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7500137616892479402:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:28.790626Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmplIrCI3/pdisk_1.dat 2025-05-03T08:44:28.806781Z node 25 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12810, node 25 2025-05-03T08:44:28.839968Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:28.839980Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:28.839981Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:28.840027Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:28.889906Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:28.889946Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:28.891508Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:28.895965Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:28.919669Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:35774) has now valid token of root@builtin 2025-05-03T08:44:28.928912Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:28.928928Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:28.928931Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:28.928943Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-05-03T08:44:29.447719Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7500137617792442153:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:29.447740Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000cc/r3tmp/tmph03ZVD/pdisk_1.dat 2025-05-03T08:44:29.467512Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25302, node 28 2025-05-03T08:44:29.485802Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:29.485814Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:29.485816Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:29.485875Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-03T08:44:29.548280Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:29.548333Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:29.549872Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:29.551341Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:29.579960Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:45434) has now valid token of root@builtin 2025-05-03T08:44:29.589981Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-05-03T08:44:29.589997Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-05-03T08:44:29.590000Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-05-03T08:44:29.590010Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> YdbOlapStore::LogCountByResource [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00016b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00016b/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 104914 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> 
test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-05-03T08:43:42.741690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137417182911331:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:42.741711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00016c/r3tmp/tmphhwWQr/pdisk_1.dat 2025-05-03T08:43:42.842339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:42.842373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:42.843801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1624, node 1 2025-05-03T08:43:42.854375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-03T08:43:42.857154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:42.857158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:42.857159Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:42.857199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:42.902739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-1" reason: "YELLOW-e9e2-1231c6b1-2" reason: "YELLOW-e9e2-1231c6b1-3" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-05-03T08:43:43.625355Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137422769594565:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.625378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:43.633177Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500137423332102858:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:43.633213Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:43.634146Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500137420918235860:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00016c/r3tmp/tmpOxvdMQ/pdisk_1.dat 2025-05-03T08:43:43.634427Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-03T08:43:43.653232Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27019, node 4 2025-05-03T08:43:43.673261Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:43:43.673278Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:43:43.673280Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:43:43.673329Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4211 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:43:43.726597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.726637Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.734771Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.734797Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.735126Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:43:43.735139Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:43:43.735332Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480 waiting... 
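The self_check_result block a few lines above links issues through their id and reason fields: the YELLOW DATABASE issue references the COMPUTE issue, which in turn references three LOAD_AVERAGE issues. A small sketch of walking that hierarchy is given below, assuming the response has already been reduced to plain dicts carrying the ids shown above.

```python
# Issue entries copied from the self_check_result above, reduced to the
# fields needed to rebuild the tree.
issues = [
    {"id": "YELLOW-f489-1231c6b1", "type": "DATABASE",
     "reason": ["YELLOW-1ba8-1231c6b1"]},
    {"id": "YELLOW-1ba8-1231c6b1", "type": "COMPUTE",
     "reason": ["YELLOW-e9e2-1231c6b1-1", "YELLOW-e9e2-1231c6b1-2",
                "YELLOW-e9e2-1231c6b1-3"]},
    {"id": "YELLOW-e9e2-1231c6b1-1", "type": "LOAD_AVERAGE", "reason": []},
    {"id": "YELLOW-e9e2-1231c6b1-2", "type": "LOAD_AVERAGE", "reason": []},
    {"id": "YELLOW-e9e2-1231c6b1-3", "type": "LOAD_AVERAGE", "reason": []},
]

by_id = {issue["id"]: issue for issue in issues}

def print_tree(issue_id, depth=0):
    issue = by_id[issue_id]
    print("  " * depth + f"{issue['type']} ({issue_id})")
    for child_id in issue.get("reason", []):
        print_tree(child_id, depth + 1)

# Root issues are the ones never referenced as another issue's reason.
referenced = {child for issue in issues for child in issue.get("reason", [])}
for issue in issues:
    if issue["id"] not in referenced:
        print_tree(issue["id"])
```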
2025-05-03T08:43:43.736166Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-05-03T08:43:43.736175Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-05-03T08:43:43.797330Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:43.797418Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:43.797455Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:43:48.625763Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7500137422769594565:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.625803Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-03T08:43:48.633945Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7500137420918235860:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.633978Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-05-03T08:43:48.633415Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7500137423332102858:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:43:48.633456Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Killing node 4 Killing node 5 2025-05-03T08:43:58.643561Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-03T08:43:58.643576Z node 4 :IMPORT WARN: Table profiles were not loaded Killing node 6 2025-05-03T08:44:04.560230Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500137512240820951:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.560258Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/00016c/r3tmp/tmpZGLJjX/pdisk_1.dat 2025-05-03T08:44:04.573858Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15203, node 8 2025-05-03T08:44:04.591128Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.591141Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.591143Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.591176Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Ow ... 
ls info to compute actor: [47:7500137623275209808:3356], channels: 1 2025-05-03T08:44:30.799912Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7500137623275209808:3356], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-05-03T08:44:30.799916Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [47:7500137623275209808:3356], 2025-05-03T08:44:30.799947Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [47:7500137623275209021:3081], seqNo: 1, nRows: 1 2025-05-03T08:44:30.799961Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7500137623275209808:3356], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 42 Tasks { TaskId: 1 CpuTimeUs: 25 FinishTimeMs: 1746261870799 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 3 BuildCpuTimeUs: 22 HostName: "ghrun-26z4vhl2yy" NodeId: 47 CreateTimeMs: 1746261870799 CurrentWaitOutputTimeUs: 4 UpdateTimeMs: 1746261870799 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:30.799967Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [47:7500137623275209808:3356], 2025-05-03T08:44:30.799971Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, Forwarded TEvStreamData to [47:7500137623275209019:3080] 2025-05-03T08:44:30.799973Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7500137623275209808:3356], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jtan321h1m455jvbtvrem768. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646926 2025-05-03T08:44:30.799989Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7500137623275209808:3356], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jtan321h1m455jvbtvrem768. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7500137623275209808 RawX2: 4503801490836764 } } DstEndpoint { ActorId { RawX1: 7500137623275209805 RawX2: 4503801490836489 } } InMemory: true } 2025-05-03T08:44:30.799993Z node 47 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-05-03T08:44:30.800177Z node 47 :KQP_EXECUTER DEBUG: TxId: 281474976715674, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388552, to: [47:7500137623275209809:3356] 2025-05-03T08:44:30.800184Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7500137623275209808:3356], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jtan321h1m455jvbtvrem768. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-05-03T08:44:30.800189Z node 47 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. Tasks execution finished 2025-05-03T08:44:30.800191Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7500137623275209808:3356], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jtan321h1m455jvbtvrem768. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-05-03T08:44:30.800199Z node 47 :KQP_COMPUTE DEBUG: TxId: 281474976715674, task: 1. pass away 2025-05-03T08:44:30.800213Z node 47 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-05-03T08:44:30.800258Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7500137623275209808:3356], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 125 Tasks { TaskId: 1 CpuTimeUs: 25 FinishTimeMs: 1746261870800 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 3 BuildCpuTimeUs: 22 HostName: "ghrun-26z4vhl2yy" NodeId: 47 CreateTimeMs: 1746261870799 UpdateTimeMs: 1746261870800 } MaxMemoryUsage: 1048576 } 2025-05-03T08:44:30.800263Z node 47 :KQP_EXECUTER INFO: TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [47:7500137623275209808:3356] 2025-05-03T08:44:30.800276Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-05-03T08:44:30.800281Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7500137623275209805:3081] TxId: 281474976715674. Ctx: { TraceId: 01jtan321h1m455jvbtvrem768, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000125s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-05-03T08:44:30.800289Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-05-03T08:44:30.800353Z node 47 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 109.1 QueriesCount: 1 2025-05-03T08:44:30.800367Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-05-03T08:44:30.800382Z node 47 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-05-03T08:44:30.800383Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, EndCleanup, isFinal: 1 2025-05-03T08:44:30.800391Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: ExecuteState, TraceId: 01jtan321h1m455jvbtvrem768, Sent query response back to proxy, proxyRequestId: 5, proxyId: [47:7500137618980238456:2278] 2025-05-03T08:44:30.800393Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: unknown state, TraceId: 01jtan321h1m455jvbtvrem768, Cleanup temp tables: 0 2025-05-03T08:44:30.800478Z node 47 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746261870612, txId: 18446744073709551615] shutting down 2025-05-03T08:44:30.800502Z node 47 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=47&id=NWRjOTVlYWYtMmY4ZGEzOWYtNmIyMGU3NTItYWI1NmFjODU=, ActorId: [47:7500137623275209021:3081], ActorState: unknown state, TraceId: 01jtan321h1m455jvbtvrem768, Session actor destroyed 2025-05-03T08:44:30.904147Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[47:7500137618980239236:2326];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-05-03T08:44:30.904156Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[47:7500137618980239228:2324];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-05-03T08:44:30.904627Z node 47 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[47:7500137618980239225:2323];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-05-03T08:44:30.904633Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[47:7500137618980239236:2326];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-03T08:44:30.904641Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[47:7500137618980239228:2324];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-03T08:44:30.905146Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[47:7500137618980239233:2325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-05-03T08:44:30.905653Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[47:7500137618980239225:2323];fline=actor.cpp:33;event=skip_flush_writing; 2025-05-03T08:44:30.905659Z node 47 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[47:7500137618980239233:2325];fline=actor.cpp:33;event=skip_flush_writing; >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000193/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000193/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 99116 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |97.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00017a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00017a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 101278 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> 
test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
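The clickbench runs above print UserWarnings from contrib/python/ydb/py3/ydb/global_settings.py about deprecated global SDK behaviour. A minimal sketch, assuming a pytest-style conftest and using only the stdlib warnings module; this is not what these suites actually do, just one way to either keep such warnings from flooding the log or escalate them so the deprecated path fails fast:

import warnings

def configure_ydb_sdk_warnings(escalate: bool = False):
    # Message fragments are copied from the log; the filter policy is a local choice.
    action = "error" if escalate else "ignore"
    for fragment in (
        "Used deprecated behavior",                       # ydb/__init__.py:43
        "Global allow split transaction is deprecated",   # global_settings.py:22
        "Global allow truncated response is deprecated",  # global_settings.py:12
    ):
        warnings.filterwarnings(action, message=f".*{fragment}.*", category=UserWarning)

# Example: turn the deprecated globals into hard errors in a local run.
# configure_ydb_sdk_warnings(escalate=True)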
|97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000159/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000159/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 115807 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file [GOOD] >> test.py::test_plans[row] >> test.py::test_run_benchmark[scan-row] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test.py::test_run_benchmark[scan-column] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000145/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000145/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback 
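The s3 suites repeatedly report ResourceWarnings for the moto_server.out.log / moto_server.err.log TextIOWrapper objects handed to subprocess.Popen in library/recipes/common/__init__.py:29, plus a "subprocess ... is still running" warning from subprocess.py:1129. A minimal sketch of the pattern that produces those warnings and a variant that avoids them; the command and paths are placeholders, not the recipe's real arguments:

import subprocess

def start_leaky(cmd, out_path, err_path):
    out = open(out_path, "w", encoding="utf-8")
    err = open(err_path, "w", encoding="utf-8")
    # If nothing ever closes out/err or waits on the process, the GC finalizes
    # them later and emits exactly the ResourceWarnings seen above.
    return subprocess.Popen(cmd, stdout=out, stderr=err)

def run_tidy(cmd, out_path, err_path, timeout=60):
    with open(out_path, "w", encoding="utf-8") as out, \
         open(err_path, "w", encoding="utf-8") as err:
        # Popen as a context manager waits for the child on exit, which also
        # avoids the "subprocess ... is still running" warning.
        with subprocess.Popen(cmd, stdout=out, stderr=err) as proc:
            proc.wait(timeout=timeout)
        return proc.returncode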
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 118430 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test.py::test_run_benchmark[generic-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] >> test.py::test_run_determentistic[column] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00016f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00016f/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 104675 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] >> test.py::test_plans[row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
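The dml suites warn about datetime.datetime.utcfromtimestamp() in contrib/python/ydb/py3/ydb/types.py:59, and the warning text itself names the replacement. A minimal sketch of that substitution (datetime.UTC requires Python 3.11+; the timestamp value is just an example, roughly the CreateTimeMs values above divided by 1000):

import datetime

ts = 1746261870  # epoch seconds, example value

naive = datetime.datetime.utcfromtimestamp(ts)             # deprecated, returns a tz-naive datetime
aware = datetime.datetime.fromtimestamp(ts, datetime.UTC)  # recommended, returns a tz-aware datetime

# Same instant, the only difference is the attached timezone info.
assert aware.replace(tzinfo=None) == naive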
>> test.py::test_run_determentistic[row] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> overlapping_portions.py::TestOverlappingPortions::test >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] |97.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000152/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000152/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 116834 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] |97.6%| [TA] $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000131/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000131/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 127980 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] |97.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] |97.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] |97.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test.py::test_plans[column] >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] >> 
test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] |97.7%| [TA] $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] |97.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-05-03T08:44:03.346948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500137508511324005:2224];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:03.347039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c8/r3tmp/tmp3WP18r/pdisk_1.dat 2025-05-03T08:44:03.412601Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28309, node 1 2025-05-03T08:44:03.429340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:03.429353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:03.429355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:03.429396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22393 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-05-03T08:44:03.446047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:03.446078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-03T08:44:03.447592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:03.484200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:03.641596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137508511324804:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.641632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.673006Z node 1 :TX_PROXY ERROR: Actor# [1:7500137508511324826:2610] txid# 281474976715658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-05-03T08:44:03.673065Z node 1 :TX_PROXY ERROR: Actor# [1:7500137508511324826:2610] txid# 281474976715658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-05-03T08:44:03.682195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500137508511324837:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.682224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:03.685524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-03T08:44:04.389187Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500137510889485051:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:04.389213Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c8/r3tmp/tmpXhBZHj/pdisk_1.dat 2025-05-03T08:44:04.403371Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9732, node 4 2025-05-03T08:44:04.421515Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:04.421543Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:04.421545Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:04.421596Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4910 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:04.489538Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:04.489569Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:04.491129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:04.491606Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:04.684089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137510889485999:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.684143Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.685094Z node 4 :TX_PROXY ERROR: Actor# [4:7500137510889486020:2598] txid# 281474976715658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-05-03T08:44:04.685140Z node 4 :TX_PROXY ERROR: Actor# [4:7500137510889486020:2598] txid# 281474976715658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-05-03T08:44:04.688164Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7500137510889486032:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.688191Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-03T08:44:04.691059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c8/r3tmp/tmpB0dk2T/pdisk_1.dat 2025-05-03T08:44:05.445175Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-03T08:44:05.453820Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10402, node 7 2025-05-03T08:44:05.473834Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:05.473850Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:05.473852Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:05.473898Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:05.539639Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:05.539672Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:05.541183Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:05.542688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12290 2025-05- ... YjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.716862Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719644. Ctx: { TraceId: 01jtan3wfcbfdh3tejhfe56jxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.729338Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719645. 
Ctx: { TraceId: 01jtan3wfr9hg0r3e5ahqshph6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.742313Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01jtan3wg5811xjd773he3zhs9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.755026Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01jtan3wgj46je5nb4vyh2r7n4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.767832Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01jtan3wgz7sgvrqp3vscc13ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.780399Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719649. Ctx: { TraceId: 01jtan3whbewjqtmc4mgrwbgy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.792063Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01jtan3whr8fh1ed41m9g1rf2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.804148Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01jtan3wj3dx3mmta6v0thtvqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.815774Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01jtan3wjgajagh7gmrwtrmvf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.827026Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01jtan3wjvd6j1531tjfs6s0qb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.838318Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01jtan3wk61r018f5rtrwgshpz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.850995Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719655. 
Ctx: { TraceId: 01jtan3wkj7gexdd7z717gk5ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.861072Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01jtan3wky4jxnwb6wwhg7tgk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.871890Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719657. Ctx: { TraceId: 01jtan3wm824b3wxgav5yybgpz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.883812Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01jtan3wmk029kdcmwka4ezthf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.895372Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719659. Ctx: { TraceId: 01jtan3wmz3xskbjxvpdjmm2v5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.908073Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01jtan3wnb8yhc9d5w0x3pnjq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.920224Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01jtan3wnr9zdrvncbb72bd5z6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.931585Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01jtan3wp34tmk6bcd7jnt6m86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.942744Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01jtan3wpebhcx979ahycss7h0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.954475Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01jtan3wpt7sg33sspk7sqtgv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.966455Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719665. 
Ctx: { TraceId: 01jtan3wq69j8qb7q06td7yw3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.979840Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01jtan3wqj38r7wn0nj96bbwbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:57.992079Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719667. Ctx: { TraceId: 01jtan3wqz0jtzjghfrk6zm9jd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:58.004731Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01jtan3wrc5awzfbdq6j065r00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTk2NzJhODYtOWJkZDRlNWUtODdhZDM2YmMtYjM5MTkxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-05-03T08:44:58.008875Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-05-03T08:44:58.009294Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-05-03T08:44:58.626070Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500137744120973447:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-03T08:44:58.626090Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/bb6b/0000c8/r3tmp/tmpo01F6Z/pdisk_1.dat 2025-05-03T08:44:58.642595Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27085, node 10 2025-05-03T08:44:58.657836Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T08:44:58.657846Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T08:44:58.657848Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T08:44:58.657890Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13872 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T08:44:58.726797Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T08:44:58.726839Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T08:44:58.728514Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T08:44:58.730132Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T08:44:58.733660Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-03T08:44:58.933541Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] |97.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000135/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/bb6b/000135/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 126106 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] >> test.py::test_run_benchmark[scan-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
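The ResourceWarning blocks above (the moto_server recipe output for ydb/tests/datashard/s3) come from log files that are opened, handed to subprocess.Popen, and never closed. A minimal sketch of the pattern that avoids the warning follows; it is an illustration in plain Python, not the recipe's actual code, and the helper names (start_logged_process, stop_logged_process) are hypothetical.

# Hypothetical sketch: keep the log handles and close them once the child exits.
# Waiting on the child also avoids the later "subprocess ... is still running" warning.
import subprocess

def start_logged_process(cmd, out_path, err_path):
    out = open(out_path, "w", encoding="utf-8")
    err = open(err_path, "w", encoding="utf-8")
    proc = subprocess.Popen(cmd, stdout=out, stderr=err)
    return proc, out, err  # caller closes the handles in stop_logged_process()

def stop_logged_process(proc, out, err, timeout=10):
    proc.terminate()
    try:
        proc.wait(timeout=timeout)
    finally:
        out.close()
        err.close()

Running the same tests with PYTHONTRACEMALLOC=10 (or python -X tracemalloc=10) attaches the allocation traceback that the warning text says is disabled, pointing at the exact open() call.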
>> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] |97.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] >> test.py::test_plans[column] [GOOD] |97.8%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] >> test.py::test_run_benchmark[generic-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] [GOOD] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] [GOOD] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] |97.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow 
split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |97.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] >> test.py::test_run_determentistic[row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] |97.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] 
[GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
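The clickbench runs above keep printing the same UserWarnings from the bundled ydb Python SDK ("Used deprecated behavior...", "Global allow split transaction / truncated response is deprecated behaviour."). One hedged way to make such warnings actionable in a local run is to escalate them to errors with the standard warnings module, so the triggering call site fails with a traceback instead of scrolling by in the log; this is plain stdlib usage, not a ydb SDK or ya make switch.

# Turn the deprecation-style UserWarnings seen above into hard errors locally,
# so the call site that triggers them is reported with a full traceback.
import warnings

warnings.filterwarnings(
    "error",
    category=UserWarning,
    module=r"ydb\.global_settings",  # module regex is an assumption about the warning origin
)

# Rough command-line equivalent:
#   python -W "error::UserWarning:ydb.global_settings" -m pytest ...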
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] >> test.py::test_run_determentistic[column] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] |97.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] |97.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] >> test.py::test_run_benchmark[scan-column] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] |98.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|98.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] [GOOD] >> 
test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] |98.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] [GOOD] |98.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
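The dml test output above logs a DeprecationWarning from contrib/python/ydb/py3/ydb/types.py:59, and the warning itself names the replacement: build a timezone-aware datetime with datetime.datetime.fromtimestamp(timestamp, datetime.UTC) instead of the deprecated utcfromtimestamp(). A minimal before/after sketch, assuming the value is a POSIX timestamp in seconds:

# utcfromtimestamp() is deprecated since Python 3.12 and returns a naive datetime;
# fromtimestamp(ts, timezone.utc) returns an aware one with the same wall-clock time.
from datetime import datetime, timezone

ts = 1746261897  # illustrative POSIX timestamp (seconds)

legacy = datetime.utcfromtimestamp(ts)            # deprecated, naive
aware = datetime.fromtimestamp(ts, timezone.utc)  # preferred; datetime.UTC is an alias in 3.11+

assert aware.replace(tzinfo=None) == legacy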
>> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_format_parquet[row] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_format_parquet[column] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] 
|98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00018b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00018b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 104219 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
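The ttl_tiering chunk above reports two recurring patterns: ResourceWarnings for the moto_server log files that library/recipes/common/__init__.py opens and never closes (plus a "subprocess ... is still running" warning), and an S3 outage simulated by sending SIGSTOP and then SIGCONT to that process. A minimal sketch of how such a recipe could be structured to avoid those warnings, assuming a generic helper process; the command name, file names, and sleep duration are placeholders, not the actual recipe code:

    import os
    import signal
    import subprocess
    import time

    def run_with_simulated_outage(cmd, out_path, err_path, hang_seconds=5):
        """Start a helper process, pause it to simulate an S3 hang-up,
        resume it, then shut it down with every handle released."""
        # Opening the log files in a with-block avoids the "unclosed file"
        # ResourceWarning emitted when Popen's stdout/stderr objects are left
        # to the garbage collector.
        with open(out_path, "w", encoding="utf-8") as out, \
             open(err_path, "w", encoding="utf-8") as err:
            proc = subprocess.Popen(cmd, stdout=out, stderr=err)
            try:
                os.kill(proc.pid, signal.SIGSTOP)  # simulating S3 hang up
                time.sleep(hang_seconds)
                os.kill(proc.pid, signal.SIGCONT)  # simulating S3 recovery
            finally:
                proc.terminate()
                # Waiting prevents the "subprocess NNN is still running" warning.
                proc.wait()

    # Hypothetical usage; "moto_server" and the log file names are illustrative only.
    # run_with_simulated_outage(["moto_server", "s3"],
    #                           "moto_server.out.log", "moto_server.err.log")

The same unclosed-file pattern shows up again in the data_correctness, s3 and unstable_connection chunks further down, so a single fix in the shared recipe would silence all of them.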
>> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00014e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00014e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 35518 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_quota_exhaustion.py::TestYdbWorkload::test |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test.py::test_run_benchmark[generic-column] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |98.4%| [TA] $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] |98.5%| [TA] $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |98.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |98.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0000a6/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0000a6/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 195505 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0000a0/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable 
tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/0000a0/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 197643 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |98.6%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> unstable_connection.py::TestUnstableConnection::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] >> 
test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> overlapping_portions.py::TestOverlappingPortions::test [FAIL] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> ReadUpdateWrite::Load >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |98.8%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> zip_bomb.py::TestZipBomb::test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] >> ReadUpdateWrite::Load [GOOD] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] |98.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] >> tool::import_test [GOOD] |98.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% 0.024751s 30% 0.024751s 50% 0.024751s 90% 0.024751s 99% 0.024751s Write: 10% 0.025340s 30% 0.025340s 50% 0.025340s 90% 0.025340s 99% 0.025340s Write: 10% 0.029576s 30% 0.029576s 50% 0.029576s 90% 0.029576s 99% 0.029576s Write: 10% 0.029809s 30% 0.029809s 50% 0.029809s 90% 0.029809s 99% 0.029809s Write: 10% 0.030977s 30% 0.030977s 50% 0.030977s 90% 0.030977s 99% 0.030977s Write: 10% 0.033854s 30% 0.033854s 50% 0.033854s 90% 0.033854s 99% 0.033854s Write: 10% 0.036194s 30% 0.036194s 50% 0.036194s 90% 0.036194s 99% 0.036194s Write: 10% 0.033667s 30% 0.033667s 50% 0.033667s 90% 0.033667s 99% 0.033667s Write: 10% 0.039073s 30% 0.039073s 50% 0.039073s 90% 0.039073s 99% 0.039073s Write: 10% 0.038306s 30% 0.038306s 50% 0.038306s 90% 0.038306s 99% 0.038306s Write: 10% 0.037991s 30% 0.037991s 50% 0.037991s 90% 0.037991s 99% 0.037991s Write: 10% 0.039611s 30% 0.039611s 50% 0.039611s 90% 0.039611s 99% 0.039611s Write: 10% 0.039476s 30% 0.039476s 50% 0.039476s 90% 0.039476s 99% 0.039476s Write: 10% 0.038081s 30% 0.038081s 50% 0.038081s 90% 0.038081s 99% 0.038081s Write: 10% 0.039136s 30% 0.039136s 50% 0.039136s 90% 0.039136s 99% 0.039136s Write: 10% 0.038933s 30% 0.038933s 50% 0.038933s 90% 0.038933s 99% 0.038933s Write: 10% 0.037690s 30% 0.037690s 50% 0.037690s 90% 0.037690s 99% 0.037690s Write: 10% 0.038369s 30% 0.038369s 50% 0.038369s 90% 0.038369s 99% 0.038369s Write: 10% 0.042729s 30% 0.042729s 50% 0.042729s 90% 0.042729s 99% 0.042729s Write: 10% 0.038091s 30% 0.038091s 50% 0.038091s 90% 0.038091s 99% 0.038091s Write: 10% 0.037712s 30% 0.037712s 50% 0.037712s 90% 0.037712s 99% 0.037712s Write: 10% 0.041233s 30% 0.041233s 50% 0.041233s 90% 0.041233s 99% 0.041233s Write: 10% WriteWrite: 10% : 10% 0.036653s0.037395s0.040939s 30% 0.040939s 50% 0.040939s 90% 30% 0.036653s 50% 30% 0.037395s 50% 0.037395s 90% 0.037395s 99% 0.036653s 90% 0.036653s 99% 0.036653s0.037395s 0.040939s 99% 0.040939s Write: 10% 0.039714s 30% 0.039714s 50% 0.039714s 90% 0.039714s 99% 0.039714s Write: 10% 0.036949s 30% 0.036949s 50% 0.036949s 90% 0.036949s 99% 0.036949s Write: 10% 0.035998s 30% 0.035998s 50% 0.035998s 90% 0.035998s 99% 0.035998s Write: 10% 0.037695s 30% 0.037695s 50% 0.037695s 90% 0.037695s 99% 0.037695s Write: 10% 0.036621sWrite: 10% 0.035944s 30% 30% 0.036621s 50% 0.036621s 90% 0.035944s 50% 0.035944s 90% 0.035944s 99% 0.036621s 99% 0.036621s 0.035944s Write: 10% 0.037351s 30% 0.037351s 50% 0.037351s 90% 0.037351s 99% 0.037351s Write: 10% 0.036018s 30% 0.036018s 50% 0.036018s 90% 0.036018s 99% 0.036018s Write: 10% 0.038857s 30% 0.038857s 50% 0.038857s 90% 0.038857s 99% 0.038857s Write: 10% 0.036017s 30% 0.036017s 50% 0.036017s 90% 0.036017s 99% 0.036017s Write: 10% 0.030146s 30% 0.030146s 50% 0.030146s 90% 0.030146s 99% 0.030146s Write: 10% 0.032780s 30% 0.032780s 50% 0.032780s 90% 0.032780s 99% 0.032780s Write: 10% 0.038975s 30% 0.038975s 50% 0.038975s 90% 0.038975s 99% 0.038975s Write: 10% 0.033200s 30% 0.033200s 50% 0.033200s 90% 0.033200s 99% 0.033200s Write: 10% 0.032768s 30% 0.032768s 50% 0.032768s 90% 0.032768s 99% 0.032768s Write: 10% 0.031129s 30% 0.031129s 50% 0.031129s 90% 0.031129s 99% 0.031129s Write: 10% 0.030056s 30% 0.030056s 50% 0.030056s 90% 0.030056s 99% 0.030056s Write: 10% 0.029467s 30% 0.029467s 50% 0.029467s 90% 0.029467s 99% 0.029467s Write: 10% Write0.033176s 30% 0.033176s 50% : 10% 0.031961s 30% 0.031961s 50% Write0.033176s 90% 0.033176s: 10% 0.031961s 90% 0.031961s 99% 0.031961s0.032854s 99% 0.033176s 30% 0.032854s 50% 0.032854s 
90% 0.032854s 99% 0.032854s Write: 10% 0.030380s 30% 0.030380s 50% 0.030380s 90% 0.030380s 99% 0.030380s Write: 10% 0.035600s 30% 0.035600s 50% 0.035600s 90% 0.035600s 99% 0.035600s WriteWrite: 10% 0.034675s 30% 0.034675s 50% : 10% 0.030431s 30% 0.030431s0.034675s 90% 0.034675s 99% 0.034675s 50% 0.030431s 90% 0.030431s 99% 0.030431s Write: 10% 0.033442s 30% 0.033442s 50% 0.033442s 90% 0.033442s 99% 0.033442s Write: 10% 0.030854s 30% 0.030854s 50% 0.030854s 90% 0.030854s 99% 0.030854s Write: 10% 0.032026s 30% 0.032026s 50% 0.032026s 90% 0.032026s 99% 0.032026s Write: 10% 0.029364s 30% 0.029364s 50% 0.029364s 90% 0.029364s 99% 0.029364s Write: 10% 0.029624s 30% 0.029624s 50% 0.029624s 90% 0.029624s 99% 0.029624s Write: 10% 0.042068s 30% 0.042068s 50% 0.042068s 90% 0.042068s 99% 0.042068s Write: 10% 0.032669s 30% 0.032669s 50% 0.032669s 90% 0.032669s 99% 0.032669s Write: 10% 0.031312s 30% 0.031312s 50% 0.031312s 90% 0.031312s 99% 0.031312s Write: 10% 0.030049s 30% 0.030049s 50% 0.030049s 90% 0.030049s 99% 0.030049s Write: 10% 0.031480s 30% 0.031480sWrite: 10% 0.028780s 30% 0.028780s 50% 0.031480s 90% 0.031480s 99% 0.031480s 50% 0.028780s 90% 0.028780s 99% 0.028780s Write: 10% 0.030220s 30% 0.030220s 50% 0.030220s 90% 0.030220s 99% 0.030220s Write: 10% 0.029549s 30% 0.029549s 50% 0.029549s 90% 0.029549s 99% 0.029549s Write: 10% 0.029212s 30% 0.029212s 50% 0.029212s 90% 0.029212s 99% 0.029212s Step 2. read write Write: 10% 0.010939s 30% 0.010939s 50% 0.010939s 90% 0.010939s 99% 0.010939s Write: 10% 0.012969s 30% 0.012969s 50% 0.012969s 90% 0.012969s 99% 0.012969s Write: 10% 0.013617s 30% 0.013617s 50% 0.013617s 90% 0.013617s 99% 0.013617s Write: 10% 0.013182s 30% 0.013182s 50% 0.013182s 90% 0.013182s 99% 0.013182s Write: 10% 0.017692s 30% 0.017692s 50% 0.017692s 90% 0.017692s 99% 0.017692s Write: 10% 0.021642s 30% 0.021642s 50% 0.021642s 90% 0.021642s 99% 0.021642s Write: 10% 0.025692s 30% 0.025692s 50% 0.025692s 90% 0.025692s 99% 0.025692s Write: 10% 0.027111s 30% 0.027111s 50% 0.027111s 90% 0.027111s 99% 0.027111s Write: 10% 0.028520s 30% 0.028520s 50% 0.028520s 90% 0.028520s 99% 0.028520s Write: 10% 0.028732s 30% 0.028732s 50% 0.028732s 90% 0.028732s 99% 0.028732s Write: 10% 0.029404s 30% 0.029404s 50% 0.029404s 90% 0.029404s 99% 0.029404s Write: 10% 0.029918s 30% 0.029918s 50% 0.029918s 90% 0.029918s 99% 0.029918s Write: 10% 0.028982s 30% 0.028982s 50% 0.028982s 90% 0.028982s 99% 0.028982s Write: 10% 0.030063s 30% 0.030063s 50% 0.030063s 90% 0.030063s 99% 0.030063s Write: 10% 0.030349s 30% 0.030349s 50% 0.030349s 90% 0.030349s 99% 0.030349s Write: 10% 0.030148s 30% 0.030148s 50% 0.030148s 90% 0.030148s 99% 0.030148s Write: 10% 0.030791s 30% 0.030791s 50% 0.030791s 90% 0.030791s 99% 0.030791s Write: 10% 0.029937s 30% 0.029937s 50% 0.029937s 90% 0.029937s 99% 0.029937s Write: 10% 0.030311s 30% 0.030311s 50% 0.030311s 90% 0.030311s 99% 0.030311s Write: 10% 0.029666s 30% 0.029666s 50% 0.029666s 90% 0.029666s 99% 0.029666s Write: 10% WriteWrite: 10% 0.028627s 30% 0.028627s 50% 0.028627s0.028854s 30% : 10% 90% 0.028627s 99% 0.028627s 0.028854s 50% 0.028854s 90% 0.028854s 99% 0.028854s 0.029470s 30% 0.029470s 50% 0.029470s 90% 0.029470s 99% 0.029470s Write: 10% 0.028313s 30% 0.028313s 50% 0.028313s 90% 0.028313s 99% 0.028313s Write: 10% 0.029634s 30% 0.029634s 50% 0.029634s 90% 0.029634s 99% 0.029634s Write: 10% 0.028985s 30% 0.028985s 50% 0.028985s 90% 0.028985s 99% 0.028985s Write: 10% 0.031092s 30% 0.031092s 50% 0.031092s 90% 0.031092s 99% 0.031092s Write: 10% 0.029086s 30% 0.029086s 50% 
0.029086s 90% 0.029086s 99% 0.029086s Write: 10% 0.031000s 30% 0.031000s 50% 0.031000s 90% 0.031000s 99% 0.031000s Write: 10% 0.031108s 30% 0.031108s 50% 0.031108s 90% 0.031108s 99% 0.031108s Write: 10% 0.030909s 30% 0.030909s 50% 0.030909s 90% 0.030909s 99% 0.030909s Write: 10% 0.032313s 30% 0.032313s 50% 0.032313s 90% 0.032313s 99% 0.032313s Write: 10% 0.030651s 30% 0.030651s 50% 0.030651s 90% 0.030651s 99% 0.030651s Write: 10% 0.030444s 30% 0.030444s 50% 0.030444s 90% 0.030444s 99% 0.030444s Write: 10% 0.031651s 30% 0.031651s 50% 0.031651s 90% 0.031651s 99% 0.031651s Write: 10% 0.032355s 30% 0.032355s 50% 0.032355s 90% 0.032355s 99% 0.032355s Write: 10% 0.031716s 30% 0.031716s 50% 0.031716s 90% 0.031716s 99% 0.031716s Write: 10% 0.032580s 30% 0.032580s 50% 0.032580s 90% 0.032580s 99% 0.032580s Write: 10% 0.031279s 30% 0.031279s 50% 0.031279s 90% 0.031279s 99% 0.031279s Write: 10% 0.031158s 30% 0.031158s 50% 0.031158s 90% 0.031158s 99% 0.031158s Write: 10% 0.033589s 30% 0.033589s 50% 0.033589s 90% 0.033589s 99% 0.033589s Write: 10% 0.034349s 30% 0.034349s 50% 0.034349s 90% 0.034349s 99% 0.034349s Write: 10% 0.032825s 30% 0.032825s 50% 0.032825s 90% 0.032825s 99% 0.032825s Write: 10% 0.034349s 30% 0.034349s 50% 0.034349s 90% 0.034349s 99% 0.034349s Write: 10% 0.032579s 30% 0.032579s 50% 0.032579s 90% 0.032579s 99% 0.032579s Write: 10% 0.033060s 30% 0.033060s 50% 0.033060s 90% 0.033060s 99% 0.033060s Write: 10% 0.033249s 30% 0.033249s 50% 0.033249s 90% 0.033249s 99% 0.033249s Write: 10% 0.033924s 30% 0.033924s 50% 0.033924s 90% 0.033924s 99% 0.033924s Write: 10% 0.036169s 30% 0.036169s 50% 0.036169s 90% 0.036169s 99% 0.036169s Write: 10% 0.032893s 30% 0.032893s 50% 0.032893s 90% 0.032893s 99% 0.032893s Write: 10% 0.033145s 30% 0.033145s 50% 0.033145s 90% 0.033145s 99% 0.033145s Write: 10% 0.031837s 30% 0.031837s 50% 0.031837s 90% 0.031837s 99% 0.031837s Write: 10% 0.032258s 30% 0.032258s 50% 0.032258s 90% 0.032258s 99% 0.032258sWrite: 10% 0.032734s 30% 0.032734s 50% 0.032734s 90% 0.032734s 99% 0.032734s Write: 10% 0.031606s 30% 0.031606s 50% 0.031606s 90% 0.031606s 99% 0.031606s Write: 10% 0.032143s 30% 0.032143s 50% 0.032143s 90% 0.032143s 99% 0.032143s Write: 10% 0.034820s 30% 0.034820s 50% 0.034820s 90% 0.034820s 99% 0.034820s Write: 10% 0.032989s 30% 0.032989s 50% 0.032989s 90% 0.032989s 99% 0.032989s Write: 10% 0.032716s 30% 0.032716s 50% WriteWrite: 10% : 10% 0.033019s0.032716s 90% WriteWrite0.034062s0.032716s: 10% 0.032469s 99% 0.032716s 30% 30% 0.032469s 50% 0.032469sWrite 90% 0.032469s 99% 0.032469s : 10% 0.033019s 50% 0.034597s: 10% 30% 0.034597s 50% 0.034597s 30% 90% 0.034597s 99% 0.034597s0.033019s0.034062s 50% 0.034062s 90% 0.034062s0.033795s 90% 99% 0.034062s0.033019s 30% 0.033795s 99% 50% 0.033795s 90% 0.033795s 99% 0.033795s 0.033019s Read: 10% 0.197437s 30% 0.197437s 50% 0.197437s 90% 0.197437s 99% 0.197437s Step 3. 
write modify Write: 10% 0.010754s 30% 0.010754s 50% 0.010754s 90% 0.010754s 99% 0.010754s Write: 10% 0.010569s 30% 0.010569s 50% 0.010569s 90% 0.010569s 99% 0.010569s Write: 10% 0.012266s 30% 0.012266s 50% 0.012266s 90% 0.012266s 99% 0.012266s Write: 10% 0.013047s 30% 0.013047s 50% 0.013047s 90% 0.013047s 99% 0.013047s Write: 10% 0.014394s 30% 0.014394s 50% 0.014394s 90% 0.014394s 99% 0.014394s Write: 10% 0.014765s 30% 0.014765s 50% 0.014765s 90% 0.014765s 99% 0.014765s Write: 10% 0.014865s 30% 0.014865s 50% 0.014865s 90% 0.014865s 99% 0.014865s Write: 10% 0.018605s 30% 0.018605s 50% 0.018605s 90% 0.018605s 99% 0.018605s Write: 10% 0.021741s 30% 0.021741s 50% 0.021741s 90% 0.021741s 99% 0.021741s Write: 10% 0.021877s 30% 0.021877s 50% 0.021877s 90% 0.021877s 99% 0.021877s Write: 10% 0.024250s 30% 0.024250s 50% 0.024250s 90% 0.024250s 99% 0.024250s Write: 10% 0.024167s 30% 0.024167s 50% 0.024167s 90% 0.024167s 99% 0.024167s Write: 10% 0.023818s 30% 0.023818s 50% 0.023818s 90% 0.023818s 99% 0.023818s Write: 10% 0.026269s 30% 0.026269s 50% 0.026269s 90% 0.026269s 99% 0.026269s Write: 10% 0.025744s 30% 0.025744s 50% 0.025744s 90% 0.025744s 99% 0.025744s Write: 10% 0.026205s 30% 0.026205s 50% 0.026205s 90% 0.026205s 99% 0.026205s Write: 10% 0.027161s 30% 0.027161s 50% 0.027161s 90% 0.027161s 99% 0.027161s Write: 10% 0.026972s 30% 0.026972s 50% 0.026972s 90% 0.026972s 99% 0.026972s Write: 10% 0.027733s 30% 0.027733s 50% 0.027733s 90% 0.027733s 99% 0.027733s Write: 10% 0.026304s 30% 0.026304s 50% 0.026304s 90% 0.026304s 99% 0.026304s Write: 10% 0.028112s 30% 0.028112s 50% 0.028112s 90% 0.028112s 99% 0.028112s Write: 10% 0.027042s 30% 0.027042s 50% 0.027042s 90% 0.027042s 99% 0.027042s Write: 10% 0.027602s 30% 0.027602s 50% 0.027602s 90% 0.027602s 99% 0.027602s Write: 10% 0.027424s 30% 0.027424s 50% 0.027424s 90% 0.027424s 99% 0.027424s Write: 10% 0.028010s 30% 0.028010s 50% 0.028010s 90% 0.028010s 99% 0.028010s Write: 10% 0.028300s 30% 0.028300s 50% 0.028300s 90% 0.028300s 99% 0.028300s Write: 10% 0.029484s 30% 0.029484s 50% 0.029484s 90% 0.029484s 99% 0.029484s Write: 10% 0.029234s 30% 0.029234s 50% 0.029234s 90% 0.029234s 99% 0.029234s Write: 10% 0.030036s 30% 0.030036s 50% 0.030036s 90% 0.030036s 99% 0.030036s Write: 10% 0.028720s 30% 0.028720s 50% 0.028720s 90% 0.028720s 99% 0.028720s Write: 10% 0.030962s 30% 0.030962s 50% 0.030962s 90% 0.030962s 99% 0.030962s Write: 10% 0.030931s 30% 0.030931s 50% 0.030931s 90% 0.030931s 99% 0.030931s Write: 10% 0.031068s 30% 0.031068s 50% 0.031068s 90% 0.031068s 99% 0.031068s Write: 10% 0.028829s 30% 0.028829s 50% 0.028829s 90% 0.028829s 99% 0.028829s Write: 10% 0.030974s 30% 0.030974s 50% 0.030974s 90% 0.030974s 99% 0.030974s Write: 10% 0.030926s 30% 0.030926s 50% 0.030926s 90% 0.030926s 99% 0.030926s Write: 10% 0.033038s 30% 0.033038s 50% 0.033038s 90% 0.033038s 99% 0.033038s Write: 10% 0.033266s 30% 0.033266s 50% 0.033266s 90% 0.033266s 99% 0.033266s Write: 10% 0.031875s 30% 0.031875s 50% 0.031875s 90% 0.031875s 99% 0.031875s Write: 10% 0.031569s 30% 0.031569s 50% 0.031569s 90% 0.031569s 99% 0.031569s Write: 10% 0.032910s 30% 0.032910s 50% 0.032910s 90% 0.032910s 99% 0.032910s Write: 10% 0.031292s 30% 0.031292s 50% 0.031292s 90% 0.031292s 99% 0.031292s Write: 10% 0.031239s 30% 0.031239s 50% 0.031239s 90% 0.031239s 99% 0.031239s Write: 10% 0.032040s 30% 0.032040s 50% 0.032040s 90% 0.032040s 99% 0.032040s Write: 10% 0.031539s 30% 0.031539s 50% 0.031539s 90% 0.031539s 99% 0.031539s Write: 10% 0.032260s 30% 0.032260s 50% 0.032260s 90% 0.032260s 99% 0.032260s 
Write: 10% 0.031361s 30% 0.031361s 50% 0.031361s 90% 0.031361s 99% 0.031361s Write: 10% 0.031180s 30% 0.031180s 50% 0.031180s 90% 0.031180s 99% 0.031180s Write: 10% 0.030766s 30% 0.030766s 50% 0.030766s 90% 0.030766s 99% 0.030766s Write: 10% 0.029392s 30% 0.029392s 50% 0.029392s 90% 0.029392s 99% 0.029392s Write: 10% 0.030278s 30% 0.030278s 50% 0.030278s 90% 0.030278s 99% 0.030278s Write: 10% 0.031878s 30% 0.031878s 50% 0.031878s 90% 0.031878s 99% 0.031878s Write: 10% 0.028830s 30% 0.028830s 50% 0.028830s 90% 0.028830s 99% 0.028830sWrite: 10% 0.029852s 30% 0.029852s 50% 0.029852s 90% 0.029852s 99% 0.029852s Write: 10% 0.029451s 30% 0.029451s 50% 0.029451s 90% 0.029451s 99% 0.029451s Write: 10% 0.030250s 30% 0.030250s 50% 0.030250s 90% 0.030250s 99% 0.030250s Write: 10% 0.030783s 30% 0.030783s 50% 0.030783s 90% 0.030783s 99% 0.030783sWrite: 10% 0.031467s 30% Write: 10% 0.031303s 30% 0.031303s 50% 0.031303s 90% 0.031303s 99% 0.031303s 0.031467s 50% 0.031467s 90% 0.031467s 99% 0.031467s Write: 10% Write: 10% 0.030615s 30% 0.030615s 50% 0.030615s 90% 0.030615s 99% 0.030615s0.029463s 30% 0.029463s 50% 0.029463s 90% 0.029463s 99% 0.029463s Write: 10% 0.029796s 30% 0.029796s 50% 0.029796s 90% 0.029796s 99% 0.029796s Write: 10% 0.030166s 30% 0.030166s 50% 0.030166s 90% 0.030166s 99% 0.030166s Write: 10% 0.026411s 30% 0.026411s 50% 0.026411s 90% 0.026411s 99% 0.026411s Update: 10% 0.037588s 30% 0.037588s 50% 0.037588s 90% 0.037588s 99% 0.037588s Step 4. read modify write Write: 10% 0.069171s 30% 0.069171s 50% 0.069171s 90% 0.069171s 99% 0.069171s Write: 10% 0.081412s 30% 0.081412s 50% 0.081412s 90% 0.081412s 99% 0.081412s Write: 10% 0.082390s 30% 0.082390s 50% 0.082390s 90% 0.082390s 99% 0.082390s Write: 10% 0.082508s 30% 0.082508s 50% 0.082508s 90% 0.082508s 99% 0.082508s Write: 10% 0.121299s 30% 0.121299s 50% 0.121299s 90% 0.121299s 99% 0.121299s Write: 10% 0.126496s 30% 0.126496s 50% 0.126496s 90% 0.126496s 99% 0.126496s Write: 10% 0.131127s 30% 0.131127s 50% 0.131127s 90% 0.131127s 99% 0.131127s Write: 10% 0.131588s 30% 0.131588s 50% 0.131588s 90% 0.131588s 99% 0.131588s Write: 10% 0.132235s 30% 0.132235s 50% 0.132235s 90% 0.132235s 99% 0.132235s Write: 10% 0.133886s 30% 0.133886s 50% 0.133886s 90% 0.133886s 99% 0.133886s Write: 10% 0.138698s 30% 0.138698s 50% 0.138698s 90% 0.138698s 99% 0.138698s Write: 10% 0.137132s 30% 0.137132s 50% 0.137132s 90% 0.137132s 99% 0.137132s Write: 10% 0.138466s 30% 0.138466s 50% 0.138466s 90% 0.138466s 99% 0.138466s Write: 10% 0.135628s 30% 0.135628s 50% 0.135628s 90% 0.135628s 99% 0.135628s Write: 10% Write: 10% 0.135917s 30% 0.137043s 30% 0.137043s 50% 0.137043s 90% 0.137043s 99% 0.137043s 0.135917s 50% 0.135917s 90% 0.135917s 99% 0.135917s Write: 10% 0.138364s 30% 0.138364s 50% 0.138364s 90% 0.138364s 99% 0.138364s Write: 10% 0.138282s 30% 0.138282s 50% 0.138282s 90% 0.138282s 99% 0.138282s Write: 10% 0.138881s 30% 0.138881s 50% 0.138881s 90% 0.138881s 99% 0.138881s Write: 10% 0.139443s 30% 0.139443s 50% 0.139443s 90% 0.139443s 99% 0.139443s Write: 10% 0.135712s 30% 0.135712s 50% 0.135712s 90% 0.135712s 99% 0.135712s Write: 10% 0.137320s 30% 0.137320s 50% 0.137320s 90% 0.137320s 99% 0.137320s Write: 10% 0.136355s 30% 0.136355s 50% 0.136355s 90% 0.136355s 99% 0.136355s Write: 10% 0.132245s 30% 0.132245s 50% 0.132245s 90% 0.132245s 99% 0.132245s Write: 10% 0.135425s 30% 0.135425s 50% 0.135425s 90% 0.135425s 99% 0.135425s Write: 10% 0.136044s 30% 0.136044s 50% 0.136044s 90% 0.136044s 99% 0.136044s Write: 10% 0.139348s 30% 0.139348s 50% 0.139348s 90% 0.139348s 
99% 0.139348s Write: 10% 0.135723s 30% 0.135723s 50% 0.135723s 90% 0.135723s 99% 0.135723s Write: 10% 0.135308s 30% 0.135308s 50% 0.135308s 90% 0.135308s 99% 0.135308s Write: 10% 0.136449s 30% 0.136449s 50% 0.136449s 90% 0.136449s 99% 0.136449s Write: 10% 0.137830s 30% 0.137830s 50% 0.137830s 90% 0.137830s 99% 0.137830s Write: 10% 0.142461s 30% 0.142461s 50% 0.142461s 90% 0.142461s 99% 0.142461s Write: 10% 0.138221s 30% 0.138221s 50% 0.138221s 90% 0.138221s 99% 0.138221s Write: 10% 0.143507s 30% 0.143507s 50% 0.143507s 90% 0.143507s 99% 0.143507s Write: 10% 0.139076s 30% 0.139076s 50% 0.139076s 90% 0.139076s 99% 0.139076s Write: 10% 0.143259s 30% 0.143259s 50% 0.143259s 90% 0.143259s 99% 0.143259s Write: 10% 0.139677s 30% 0.139677s 50% 0.139677s 90% 0.139677s 99% 0.139677s Write: 10% 0.148055s 30% 0.148055s 50% 0.148055s 90% 0.148055s 99% 0.148055s Write: 10% 0.149686s 30% 0.149686s 50% 0.149686s 90% 0.149686s 99% 0.149686s Write: 10% 0.150087s 30% 0.150087s 50% 0.150087s 90% 0.150087s 99% 0.150087s Write: 10% 0.145337s 30% 0.145337s 50% 0.145337s 90% 0.145337s 99% 0.145337s Write: 10% 0.153462s 30% 0.153462s 50% 0.153462s 90% 0.153462s 99% 0.153462s Write: 10% 0.150617s 30% 0.150617s 50% 0.150617s 90% 0.150617s 99% 0.150617s Write: 10% 0.149410s 30% 0.149410s 50% 0.149410s 90% 0.149410s 99% 0.149410s Write: 10% 0.157612s 30% 0.157612s 50% 0.157612s 90% 0.157612s 99% 0.157612s Write: 10% 0.150795s 30% 0.150795s 50% 0.150795s 90% 0.150795s 99% 0.150795s Write: 10% 0.150989s 30% 0.150989s 50% 0.150989s 90% 0.150989s 99% 0.150989s Write: 10% 0.151401s 30% 0.151401s 50% 0.151401s 90% 0.151401s 99% 0.151401s Write: 10% 0.156545s 30% 0.156545s 50% 0.156545s 90% 0.156545s 99% 0.156545s Write: 10% 0.155634s 30% 0.155634s 50% 0.155634s 90% 0.155634s 99% 0.155634s Write: 10% 0.152825s 30% 0.152825s 50% 0.152825s 90% 0.152825s 99% 0.152825s Write: 10% 0.158711s 30% 0.158711s 50% 0.158711s 90% 0.158711s 99% 0.158711s Write: 10% 0.153196s 30% 0.153196s 50% 0.153196s 90% 0.153196s 99% 0.153196s Write: 10% 0.156270s 30% 0.156270s 50% 0.156270s 90% 0.156270s 99% 0.156270s Write: 10% 0.157907s 30% 0.157907s 50% 0.157907s 90% 0.157907s 99% 0.157907s Write: 10% 0.155215s 30% 0.155215s 50% 0.155215s 90% 0.155215s 99% 0.155215s Write: 10% 0.157973s 30% 0.157973s 50% 0.157973s 90% 0.157973s 99% 0.157973s Write: 10% 0.157329s 30% Write: 10% 0.156078s 30% 0.156078s 50% 0.157329s 50% 0.157329s 90% 0.157329s 99% 0.157329s 0.156078s 90% 0.156078s 99% 0.156078s Write: 10% 0.155127s 30% 0.155127s 50% 0.155127s 90% 0.155127sWrite: 10% 0.154728s 30% 0.154728s 50% 0.154728s 90% 0.154728s 99% 0.154728s 99% 0.155127s Write: 10% 0.159443s 30% 0.159443s 50% 0.159443s 90% 0.159443s 99% 0.159443s Write: 10% 0.158922s 30% 0.158922s 50% 0.158922s 90% 0.158922s 99% 0.158922s Write: 10% 0.159269s 30% 0.159269s 50% 0.159269s 90% 0.159269s 99% 0.159269s Update: 10% 0.021937s 30% 0.021937s 50% 0.156423s 90% 0.156423s 99% 0.156423s Read: 10% 0.303463s 30% 0.303463s 50% 0.303463s 90% 0.303463s 99% 0.303463s |98.9%| [TM] {RESULT} ydb/tests/olap/high_load/unittest |98.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/import_test >> tool::import_test [GOOD] |98.9%| [TS] {RESULT} ydb/tests/stability/tool/import_test >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] |98.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000148/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000148/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 117343 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> test_workload.py::TestYdbLogWorkload::test[row] >> ParseOptionsTest::EndpointAndDatabaseFromCommandLine >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromCommandLine [GOOD] >> ParseOptionsTest::NoDiscoveryCommandLine [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromActiveProfile >> ParseOptionsTest::EndpointAndDatabaseFromActiveProfile [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromExplicitProfile [GOOD] >> ParseOptionsTest::IamToken >> ParseOptionsTest::IamToken [GOOD] >> ParseOptionsTest::YdbToken |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] >> ParseOptionsTest::YdbToken [GOOD] >> ParseOptionsTest::StaticCredentials >> ParseOptionsTest::StaticCredentials [GOOD] >> ParseOptionsTest::AnonymousCredentials >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> ParseOptionsTest::AnonymousCredentials [GOOD] >> ParseOptionsTest::EnvPriority [GOOD] >> YdbDump::NotNullTypeDump >> YdbDump::NotNullTypeDump [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_UserValue >> YdbTopic::SupportedCodecs_TopicCreate_UserValue [GOOD] >> YdbTopic::SupportedCodecs_TopicAlter >> YdbTopic::SupportedCodecs_TopicAlter [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue [GOOD] >> YdbWorkloadTopic::Default_RunFull |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] |99.0%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] |99.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] |99.1%| [TA] $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert: got overload issue delete #0 ok delete #1 ok delete #2 ok delete #3 ok delete #4 ok delete #5 ok delete #6 ok delete #7 ok delete #8 ok delete #9 ok delete #10 ok delete #11 ok delete #12 ok delete #13 ok delete #14 ok delete #15 ok delete #16 ok delete #17 ok delete #18 ok delete #19 ok delete #20 ok delete #21 ok delete #22 ok delete #23 ok delete #24 ok delete #25 ok delete #26 ok delete #27 ok delete #28 ok delete #29 ok delete #30 ok delete #31 ok delete #32 ok delete #33 ok delete #34 ok delete #35 ok delete #36 ok delete #37 ok delete #38 ok delete #39 ok delete #40 ok delete #41 ok delete #42 ok delete #43 ok delete #44 ok delete #45 ok delete #46 ok delete #47 ok delete #48 ok delete #49 ok delete #50 ok delete #51 ok delete #52 ok delete #53 ok delete #54 ok delete #55 ok delete #56 ok delete #57 ok delete #58 ok delete #59 ok delete #60 ok delete #61 ok delete #62 ok delete #63 ok delete #64 ok delete #65 ok delete #66 ok delete #67 ok delete #68 ok delete #69 ok delete #70 ok delete #71 ok delete #72 ok delete #73 ok delete #74 ok delete #75 ok delete #76 ok delete #77 ok delete #78 ok delete #79 ok delete #80 ok delete #81 ok delete #82 ok delete #83 ok delete #84 ok delete #85 ok delete #86 ok delete #87 ok delete #88 ok delete #89 ok delete #90 ok delete #91 ok delete #92 ok delete #93 ok delete #94 ok delete #95 ok delete #96 ok delete #97 ok delete #98 ok delete #99 ok delete #100 ok delete #101 ok delete #102 ok delete #103 ok delete #104 ok delete #105 ok delete #106 ok delete #107 ok delete #108 ok delete #109 ok delete #110 ok delete #111 ok delete #112 ok delete #113 ok delete #114 ok delete #115 ok delete #116 ok delete #117 ok delete #118 ok delete #119 ok delete #120 ok delete #121 
ok delete #122 ok delete #123 ok delete #124 ok delete #125 ok delete #126 ok delete #127 ok delete #128 ok delete #129 ok delete #130 ok delete #131 ok delete #132 ok delete #133 ok delete #134 ok delete #135 ok delete #136 ok delete #137 ok delete #138 ok delete #139 ok delete #140 ok delete #141 ok delete #142 ok delete #143 ok delete #144 ok delete #145 ok delete #146 ok delete #147 ok delete #148 ok delete #149 ok delete #150 ok delete #151 ok delete #152 ok delete #153 ok delete #154 ok delete #155 ok delete #156 ok delete #157 ok delete #158 ok delete #159 ok delete #160 ok delete #161 ok delete #162 ok delete #163 ok delete #164 ok delete #165 ok delete #166 ok delete #167 ok delete #168 ok delete #169 ok delete #170 ok delete #171 ok delete #172 ok delete #173 ok delete #174 ok delete #175 ok delete #176 ok delete #177 ok delete #178 ok delete #179 ok delete #180 ok delete #181 ok delete #182 ok delete #183 ok delete #184 ok delete #185 ok delete #186 ok delete #187 ok delete #188 ok delete #189 ok delete #190 ok delete #191 ok delete #192 ok delete #193 ok delete #194 ok delete #195 ok delete #196 ok delete #197 ok delete #198 ok delete #199 ok delete #200 ok delete #201 ok delete #202 ok delete #203 ok delete #204 ok delete #205 ok delete #206 ok delete #207 ok delete #208 ok delete #209 ok delete #210 ok delete #211 ok delete #212 ok delete #213 ok delete #214 ok delete #215 ok delete #216 ok delete #217 ok delete #218 ok delete #219 ok delete #220 ok delete #221 ok delete #222 ok delete #223 ok delete #224 ok delete #225 ok delete #226 ok delete #227 ok delete #228 ok delete #229 ok delete #230 ok delete #231 ok delete #232 ok delete #233 ok delete #234 ok delete #235 ok delete #236 ok delete #237 ok delete #238 ok delete #239 ok delete #240 ok delete #241 ok delete #242 ok delete #243 ok delete #244 ok delete #245 ok delete #246 ok delete #247 ok delete #248 ok delete #249 ok delete #250 ok delete #251 ok delete #252 ok delete #253 ok delete #254 ok delete #255 ok delete #256 ok delete #257 ok delete #258 ok delete #259 ok delete #260 ok delete #261 ok delete #262 ok delete #263 ok delete #264 ok delete #265 ok delete #266 ok delete #267 ok delete #268 ok delete #269 ok delete #270 ok delete #271 ok delete #272 ok delete #273 ok delete #274 ok delete #275 ok delete #276 ok delete #277 ok delete #278 ok delete #279 ok delete #280 ok delete #281 ok delete #282 ok delete #283 ok delete #284 ok delete #285 ok delete #286 ok delete #287 ok delete #288 ok delete #289 ok delete #290 ok delete #291 ok delete #292 ok delete #293 ok delete #294 ok delete #295 ok delete #296 ok delete #297 ok delete #298 ok delete #299 ok delete #300 ok delete #301 ok delete #302 ok delete #303 ok delete #304 ok delete #305 ok delete #306 ok delete #307 ok delete #308 ok delete #309 ok delete #310 ok delete #311 ok delete #312 ok delete #313 ok delete #314 ok delete #315 ok delete #316 ok delete #317 ok delete #318 ok delete #319 ok delete #320 ok delete #321 ok delete #322 ok delete #323 ok delete #324 ok delete #325 ok delete #326 ok delete #327 ok delete #328 ok delete #329 ok delete #330 ok delete #331 ok delete #332 ok delete #333 ok delete #334 ok delete #335 ok delete #336 ok delete #337 ok delete #338 ok delete #339 ok delete #340 ok delete #341 ok delete #342 ok delete #343 ok delete #344 ok delete #345 ok delete #346 ok delete #347 ok delete #348 ok delete #349 ok delete #350 ok delete #351 ok delete #352 ok delete #353 ok delete #354 ok delete #355 ok delete #356 ok delete #357 ok delete #358 
ok delete #359 ok delete #360 ok delete #361 ok delete #362 ok delete #363 ok delete #364 ok delete #365 ok delete #366 ok delete #367 ok delete #368 ok delete #369 ok delete #370 ok delete #371 ok delete #372 ok delete #373 ok delete #374 ok delete #375 ok delete #376 ok delete #377 ok delete #378 ok delete #379 ok delete #380 ok delete #381 ok delete #382 ok delete #383 ok delete #384 ok delete #385 ok delete #386 ok delete #387 ok delete #388 ok delete #389 ok delete #390 ok delete #391 ok delete #392 ok delete #393 ok delete #394 ok delete #395 ok delete #396 ok delete #397 ok delete #398 ok delete #399 ok delete #400 ok delete #401 ok delete #402 ok delete #403 ok delete #404 ok delete #405 ok delete #406 ok delete #407 ok delete #408 ok delete #409 ok delete #410 ok delete #411 ok delete #412 ok delete #413 ok delete #414 ok delete #415 ok delete #416 ok delete #417 ok delete #418 ok delete #419 ok delete #420 ok delete #421 ok delete #422 ok delete #423 ok delete #424 ok delete #425 ok delete #426 ok delete #427 ok delete #428 ok delete #429 ok delete #430 ok delete #431 ok delete #432 ok delete #433 ok delete #434 ok delete #435 ok delete #436 ok delete #437 ok delete #438 ok delete #439 ok delete #440 ok delete #441 ok delete #442 ok delete #443 ok delete #444 ok delete #445 ok delete #446 ok delete #447 ok delete #448 ok delete #449 ok delete #450 ok delete #451 ok delete #452 ok delete #453 ok delete #454 ok delete #455 ok delete #456 ok delete #457 ok delete #458 ok delete #459 ok delete #460 ok delete #461 ok delete #462 ok delete #463 ok delete #464 ok delete #465 ok delete #466 ok delete #467 ok delete #468 ok delete #469 ok delete #470 ok delete #471 ok delete #472 ok delete #473 ok delete #474 ok delete #475 ok delete #476 ok delete #477 ok delete #478 ok delete #479 ok delete #480 ok delete #481 ok delete #482 ok delete #483 ok delete #484 ok delete #485 ok delete #486 ok delete #487 ok delete #488 ok delete #489 ok delete #490 ok delete #491 ok delete #492 ok delete #493 ok delete #494 ok delete #495 ok delete #496 ok delete #497 ok delete #498 ok delete #499 ok ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000178/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000178/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 104970 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] [GOOD] >> 
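The test_quota_exhaustion.py::TestYdbWorkload::test_delete output above follows a simple pattern: batches are upserted until the server answers with a quota/overload issue ("upsert: got overload issue"), and the written batches are then deleted one by one. A minimal control-flow sketch of that loop, using hypothetical upsert_batch/delete_batch callables and a hypothetical quota_error exception type in place of the real YDB SDK calls (this is not the actual test source):

    from typing import Callable

    def fill_until_quota_exceeded(upsert_batch: Callable[[int], None],
                                  quota_error: type[Exception]) -> int:
        """Issue upsert batches until the server reports a quota/overload issue."""
        n = 0
        while True:
            try:
                upsert_batch(n)              # e.g. a bulk UPSERT of generated rows
                print(f"upsert #{n} ok, result: []")
                n += 1
            except quota_error:
                print("upsert: got overload issue")
                return n

    def delete_all(delete_batch: Callable[[int], None], batches: int) -> None:
        """Delete the previously written batches one by one, as in the log."""
        for i in range(batches):
            delete_batch(i)
            print(f"delete #{i} ok")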
test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> zip_bomb.py::TestZipBomb::test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing [GOOD] >> 
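The dml py3test stderr a few entries above ends with a DeprecationWarning from contrib/python/ydb/py3/ydb/types.py: datetime.datetime.utcfromtimestamp() is deprecated, and the warning itself names the replacement. A minimal illustration of the suggested migration (an example only, not a patch to the SDK), requiring Python 3.11+ for datetime.UTC:

    import datetime

    ts = 1700000000

    # Deprecated since Python 3.12: returns a naive datetime that implicitly means UTC.
    naive = datetime.datetime.utcfromtimestamp(ts)

    # Replacement suggested by the warning: a timezone-aware datetime in UTC.
    aware = datetime.datetime.fromtimestamp(ts, datetime.UTC)

    # Same wall-clock instant; only the tzinfo differs.
    assert aware.replace(tzinfo=None) == naive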
test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> YdbWorkloadTopic::Default_RunFull [GOOD] >> YdbWorkloadTopic::Init_Clean >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [FAIL] >> YdbWorkloadTopic::Init_Clean [GOOD] >> YdbWorkloadTopic::Clean_Without_Init >> YdbWorkloadTopic::Clean_Without_Init [GOOD] >> YdbWorkloadTopic::Double_Init >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] >> YdbWorkloadTopic::Double_Init [GOOD] >> YdbWorkloadTopic::Read_Statistics >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> YdbWorkloadTopic::Read_Statistics [GOOD] >> YdbWorkloadTopic::Write_Statistics >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> 
test_clickbench.py::TestClickbench::test_clickbench[14] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] >> YdbWorkloadTopic::Write_Statistics [GOOD] >> YdbWorkloadTopic::ReadWrite_Statistics >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] >> YdbWorkloadTopic::ReadWrite_Statistics [GOOD] >> YdbWorkloadTopic::Write_Statistics_UseTx >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test_log_scenario.py::TestLogScenario::test[180] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] >> YdbWorkloadTopic::Write_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::Full_Statistics_UseTx >> 
test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] |99.3%| [TM] {RESULT} ydb/tests/sql/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] >> YdbWorkloadTopic::Full_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::WriteInTx >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option [GOOD] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> 
test_clickbench.py::TestClickbench::test_clickbench[42] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.4%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] >> YdbWorkloadTopic::WriteInTx [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] >> test_workload.py::TestYdbLogWorkload::test[row] [GOOD] >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Run |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |99.5%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00017d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00017d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 
10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 10000}, portions: 2 rows by tier: {'__DEFAULT': 54959}, portions: 4 rows by tier: {'__DEFAULT': 100000}, portions: 4 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 30471 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Run [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Init_Clean >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] |99.6%| [TA] $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup |99.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] |99.6%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded 
False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Clean_Without_Init >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore >> YdbWorkloadTransferTopicToTable::Clean_Without_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Double_Init >> YdbWorkloadTransferTopicToTable::Double_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Statistics >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore [GOOD] |99.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.7%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/bb6b/00008e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00008e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 210434 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.7%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] |99.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/apps/ydb/ut/unittest >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] |99.7%| [TM] {RESULT} ydb/apps/ydb/ut/unittest >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[13] >> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[14] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000172/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/000172/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 106011 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[16] >> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] >> 
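Several stderr blocks above (ttl_tiering, s3_import) end with ResourceWarnings about the moto_server.out.log/err.log handles passed to subprocess.Popen in library/recipes/common/__init__.py being left open, plus the hint to enable tracemalloc. A hedged sketch of the usual way such warnings are avoided, with hypothetical start_moto/stop_moto helpers rather than the actual recipe code:

    import subprocess

    def start_moto(cmd: list[str], out_path: str, err_path: str) -> subprocess.Popen:
        """Start the child process while keeping references to its log handles,
        so teardown can close them and avoid the 'unclosed file' ResourceWarning."""
        out = open(out_path, "w", encoding="utf-8")
        err = open(err_path, "w", encoding="utf-8")
        proc = subprocess.Popen(cmd, stdout=out, stderr=err)
        proc._log_handles = (out, err)   # hypothetical bookkeeping attribute used only by this sketch
        return proc

    def stop_moto(proc: subprocess.Popen) -> None:
        """Terminate and reap the child, then close its log files; this also avoids
        the 'subprocess ... is still running' warning seen in the log."""
        proc.terminate()
        proc.wait()
        for handle in getattr(proc, "_log_handles", ()):
            handle.close()

Running the tests with python -X tracemalloc=10 (or PYTHONTRACEMALLOC=10 in the environment) makes such warnings include the allocation traceback, which is what the "Enable tracemalloc to get the object allocation traceback" hint refers to.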
test_log_scenario.py::TestLogScenario::test[180] [GOOD] >> test_log_scenario.py::TestLogScenario::test[1051200] >> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] >> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] >> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] >> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} >> test_log_scenario.py::TestLogScenario::test[1051200] [FAIL] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_log_scenario.py::TestLogScenario::test[1051200] [FAIL] Test command err: Pid 211180 upsert #0 ok, result: [] upsert #1 ok, result: [] Rss after upsert 631460 [{'column0': b'x…x1', 'column1': b'x…x2', 'column2': b'x…x3', 'column3': b'x…', ...}] [row payload omitted: each column value is a multi-kilobyte run of the character 'x'; the dump was already truncated mid-row in the original log]
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx4', 'column4': b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx5'}] Max rss {} 3578276 |99.9%| [TM] {RESULT} ydb/tests/olap/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> 
test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |99.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] Test command err: upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False 
[upsert #51 through upsert #199 omitted for brevity: all 149 of these records are identical in form, each reporting "ok, result: [] Quota exceeded False"]
upsert #200 ok, result: [] Quota exceeded False upsert #201 ok, result: [] Quota exceeded False upsert #202 ok, result: [] Quota exceeded False upsert #203 ok, result: [] Quota exceeded False upsert #204 ok, result: [] Quota exceeded False upsert #205 ok, result: [] Quota exceeded False upsert #206 ok, result: [] Quota exceeded False upsert #207 ok, result: [] Quota exceeded False upsert #208 ok, result: [] Quota exceeded False upsert #209 ok, result: [] Quota exceeded False upsert #210 ok, result: [] Quota exceeded False upsert #211 ok, result: [] Quota exceeded False upsert #212 ok, result: [] Quota exceeded False upsert #213 ok, result: [] Quota exceeded False upsert #214 ok, result: [] Quota exceeded False upsert #215 ok, result: [] Quota exceeded False upsert #216 ok, result: [] Quota exceeded False upsert #217 ok, result: [] Quota exceeded False upsert #218 ok, result: [] Quota exceeded False upsert #219 ok, result: [] Quota exceeded False upsert #220 ok, result: [] Quota exceeded False upsert #221 ok, result: [] Quota exceeded False upsert #222 ok, result: [] Quota exceeded False upsert #223 ok, result: [] Quota exceeded False upsert #224 ok, result: [] Quota exceeded False upsert #225 ok, result: [] Quota exceeded False upsert #226 ok, result: [] Quota exceeded False upsert #227 ok, result: [] Quota exceeded False upsert #228 ok, result: [] Quota exceeded False upsert #229 ok, result: [] Quota exceeded False upsert #230 ok, result: [] Quota exceeded False upsert #231 ok, result: [] Quota exceeded False upsert #232 ok, result: [] Quota exceeded False upsert #233 ok, result: [] Quota exceeded False upsert #234 ok, result: [] Quota exceeded False upsert #235 ok, result: [] Quota exceeded False upsert #236 ok, result: [] Quota exceeded False upsert #237 ok, result: [] Quota exceeded False upsert #238 ok, result: [] Quota exceeded False upsert #239 ok, result: [] Quota exceeded False upsert #240 ok, result: [] Quota exceeded False upsert #241 ok, result: [] Quota exceeded False upsert #242 ok, result: [] Quota exceeded False upsert #243 ok, result: [] Quota exceeded False upsert #244 ok, result: [] Quota exceeded False upsert #245 ok, result: [] Quota exceeded False upsert #246 ok, result: [] Quota exceeded False upsert #247 ok, result: [] Quota exceeded False upsert #248 ok, result: [] Quota exceeded False upsert #249 ok, result: [] Quota exceeded False upsert #250 ok, result: [] Quota exceeded False upsert #251 ok, result: [] Quota exceeded False upsert #252 ok, result: [] Quota exceeded False upsert #253 ok, result: [] Quota exceeded False upsert #254 ok, result: [] Quota exceeded False upsert #255 ok, result: [] Quota exceeded False upsert #256 ok, result: [] Quota exceeded False upsert #257 ok, result: [] Quota exceeded False upsert #258 ok, result: [] Quota exceeded False upsert #259 ok, result: [] Quota exceeded False upsert #260 ok, result: [] Quota exceeded False upsert #261 ok, result: [] Quota exceeded False upsert: got overload issue |99.9%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-03 08:55:04,312 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 08:55:04,368 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 156518 169M 173M 113M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/bb6b/00015a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 158888 953M 955M 693M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1 161995 110M 110M 82.6M └─ moto_server s3 --port 22106 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/bb6b/00015a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bb6b/00015a', '--source-root', '/home/runner/.ya/build/build_root/bb6b/00015a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', 
'--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/bb6b/00015a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bb6b/00015a', '--source-root', '/home/runner/.ya/build/build_root/bb6b/00015a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bb6b/00015a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |99.9%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] 2025-05-03 08:56:41,215 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 08:56:41,332 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
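Both timeouts above have the same shape: the test's own polling loop (ydb/tests/olap/ttl_tiering/base.py wait_for, visible in the traceback, sleeps 1 s per iteration) keeps waiting for a condition while the outer test_tool wrapper enforces a hard 600 s limit and eventually kills the whole process tree. A minimal sketch of such a polling helper with its own deadline is shown below; the function signature, the poll_interval parameter and the usage line are illustrative assumptions, not the actual base.py code.

    import time

    def wait_for(condition, timeout_seconds, poll_interval=1.0):
        """Poll `condition` until it returns True or `timeout_seconds` elapses.

        Returns True on success and False on timeout, so the caller can fail
        the test explicitly instead of being killed by an outer wrapper timeout.
        (Illustrative sketch only; the real base.py helper may differ.)
        """
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            if condition():
                return True
            time.sleep(poll_interval)  # mirrors the time.sleep(1) seen in the traceback
        return False

    # Hypothetical usage inside a test:
    # assert wait_for(lambda: data_migrated(), timeout_seconds=300), "data migration did not finish in time"
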
Process tree before termination: pid rss ref pdirt 204876 2.0G 2.0G 1.9G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/bb6b/000063/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 256217 1.1G 1.1G 905M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alte Test command err: test_suffix, num 0, table path create_and_drop_tables start_time 1746262005.3628216 Path create_and_drop_tables removed Path create_and_drop_tables removed test_suffix, num 0, table path read_data_during_bulk_upsert0 start_time 1746262034.8092754 test_suffix, num 1, table path read_data_during_bulk_upsert1 start_time 1746262034.8097274 test_suffix, num 2, table path read_data_during_bulk_upsert2 start_time 1746262034.812057 test_suffix, num 3, table path read_data_during_bulk_upsert3 start_time 1746262034.81402 test_suffix, num 4, table path read_data_during_bulk_upsert4 start_time 1746262034.8153763 test_suffix, num 5, table path read_data_during_bulk_upsert5 start_time 1746262034.8168223 test_suffix, num 6, table path read_data_during_bulk_upsert6 start_time 1746262034.820319 test_suffix, num 7, table path read_data_during_bulk_upsert7 start_time 1746262034.8230171 test_suffix, num 8, table path read_data_during_bulk_upsert8 start_time 1746262034.8235297 test_suffix, num 9, table path read_data_during_bulk_upsert9 start_time 1746262034.8242123 Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert4 removed test_suffix, num 0, table path read_data_during_bulk_upsert start_time 1746262145.5166698 Path read_data_during_bulk_upsert removed Path read_data_during_bulk_upsert removed test_suffix, num 0, table path many_tables start_time 1746262268.7329934 Path many_tables removed contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s 
is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). Path many_tables removed test_suffix, num 0, table path alter_table start_time 1746262413.7490542 Path alter_table removed Path alter_table removed test_suffix, num 0, table path alter_tablestore start_time 1746262414.446723 Path alter_tablestore removed Path alter_tablestore removed test_suffix, num 0, table path table start_time 1746262415.0460398 Path table removed Path table removed test_suffix, num 0, table path tablestores start_time 1746262415.4645634 Path tablestores removed Path tablestores removed test_suffix, num 0, table path alter_compression start_time 1746262429.9980085 Path alter_compression removed File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File 
"contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/bb6b/000063/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0) File "/home/runner/.ya/build/build_root/bb6b/000063/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_compression.py", line 339, in scenario_alter_compression self._scenario( File "ydb/tests/olap/scenario/test_alter_compression.py", line 231, in _scenario assert self._read_data(ctx, tables=tables, column_names=column_names) File "ydb/tests/olap/scenario/test_alter_compression.py", line 197, in _read_data scan_result = sth.execute_scan_query( File "contrib/python/allure-python-commons/allure_commons/_allure.py", line 202, in impl return func(*a, **kw) File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 461, in execute_scan_query for result_set in it: File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/ydb/py3/ydb/table.py", line 1196, in lambda resp: _wrap_scan_query_response(resp, self._table_client_settings), File "contrib/python/ydb/py3/ydb/table.py", line 968, in _wrap_scan_query_response return ScanQueryResult(response.result, table_client_settings) File "contrib/python/ydb/py3/ydb/table.py", line 940, in __init__ self.result_set = convert.ResultSet.from_message(self._result.result_set, table_client_settings) File "contrib/python/ydb/py3/ydb/convert.py", line 375, in from_message v_type = value.WhichOneof("value") File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Caught base exception, num 0 message Graceful shutdown requested Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, 
MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/bb6b/000063/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bb6b/000063', '--source-root', '/home/runner/.ya/build/build_root/bb6b/000063/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/bb6b/000063/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/bb6b/000063', '--source-root', '/home/runner/.ya/build/build_root/bb6b/000063/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/bb6b/000063/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by 
size: 5 ydb/tests/olap [size:medium] ------ sole chunk ran 3 tests (total:316.56s - test:304.03s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.1G (8525980K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 210780 44.8M 44.8M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 210828 31.4M 19.7M 7.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 210833 399M 402M 345M └─ ydb-tests-olap --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 228964 7.4G 7.4G 6.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_sce 253125 122M 126M 94.9M ├─ ydb -e grpc://localhost:16070 -d /Root workload log --path log run upsert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 1051200 253126 66.9M 69.8M 38.5M ├─ ydb -e grpc://localhost:16070 -d /Root workload log --path log run bulk_upsert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 1051200 253127 118M 121M 89.8M └─ ydb -e grpc://localhost:16070 -d /Root workload log --path log run insert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 1051200 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/stderr [fail] test_log_scenario.py::TestLogScenario::test[1051200] [default-linux-x86_64-relwithdebinfo] (45.69s) ydb/tests/olap/test_log_scenario.py:152: in test thread.join() ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs) ydb/tests/olap/test_log_scenario.py:117: in aggregation_query self.ydb_client.query(f"SELECT COUNT(*) FROM `{self.table_name}` ") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:204: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:202: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/grpcio/py3/grpc/_channel.py:475: in __next__ return self._next() contrib/python/grpcio/py3/grpc/_channel.py:881: in _next raise self E grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with: E status = StatusCode.UNAVAILABLE E details = "Socket closed" E debug_error_string = "UNKNOWN:Error received from peer {grpc_message:"Socket closed", grpc_status:14, created_time:"2025-05-03T08:51:34.907460421+00:00", file_line:1211, 
file:"/home/runner/actions_runner/_work/ydb/ydb/contrib/libs/grpc/src/core/lib/surface/call.cc"}" E >teardown failed: ydb/tests/olap/test_log_scenario.py:90: in teardown_class ..[snippet truncated].. a option E VERIFY failed (2025-05-03T08:51:32.499776Z): memory leak; Expected 851968, actual 786432 (12 page(s), 65536 offloaded); allocator created at: ydb/library/yql/dq/actors/compute/dq_compute_actor_impl.h:200 E yql/essentials/minikql/aligned_page_pool.cpp:346 E ~TAlignedPagePoolImpl(): requirement TotalAllocated == FreePages.size() * POOL_PAGE_SIZE failed E ======= terminate() call stack ======== E 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0xA39A9B5 E 1. /-S/util/system/yassert.cpp:55: Panic @ 0xA3931F6 E 2. /tmp//-S/yql/essentials/minikql/aligned_page_pool.cpp:343: ~TAlignedPagePoolImpl @ 0xCA13D6E E 3. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:173: __release_shared @ 0x1A0D24B6 E 4. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:219: __release_shared @ 0x1A0D24B6 E 5. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:694: ~shared_ptr @ 0x1A0D24B6 E 6. /tmp//-S/ydb/core/kqp/runtime/kqp_write_actor.cpp:172: ~TKqpTableWriteActor @ 0x1A0D24B6 E 7. /tmp//-S/ydb/core/kqp/runtime/kqp_write_actor.cpp:172: ?? @ 0x1A0D25AD E 8. /-S/util/generic/ptr.h:36: CheckedDelete @ 0xB0F2F27 E 9. /-S/util/generic/ptr.h:57: Destroy @ 0xB0F2F27 E 10. /-S/util/generic/ptr.h:376: DoDestroy @ 0xB0F2F27 E 11. /-S/util/generic/ptr.h:306: ~THolder @ 0xB0F2F27 E 12. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:154: destroy @ 0xB0F2F27 E 13. /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:335: destroy, 0> @ 0xB0F2F27 E 14. /-S/contrib/libs/cxxsupp/libcxx/include/vector:1015: __base_destruct_at_end @ 0xB0F2F27 E 15. /-S/contrib/libs/cxxsupp/libcxx/include/vector:1009: __clear @ 0xB0F2F27 E 16. /-S/contrib/libs/cxxsupp/libcxx/include/vector:766: clear @ 0xB0F2F27 E 17. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:113: DropUnregistered @ 0xB0F2F27 E 18. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:280: Execute @ 0xB0F3BC9 E 19. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:460: operator() @ 0xB0F77C2 E 20. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:512: ProcessExecutorPool @ 0xB0F7390 E 21. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:538: ThreadProc @ 0xB0F7FFA E 22. /-S/util/system/thread.cpp:244: ThreadProxy @ 0xA39BE1C E 23. ??:0: ?? @ 0x7FC422572AC2 E 24. ??:0: ?? 
@ 0x7FC42260484F Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario.py.TestLogScenario.test.1051200.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff ------ FAIL: 2 - GOOD, 1 - FAIL ydb/tests/olap ydb/tests/olap/data_quotas [size:medium] nchunks:10 ------ [2/10] chunk ran 1 test (total:219.53s - test:219.46s) [fail] test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [default-linux-x86_64-relwithdebinfo] (217.89s) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:236: in test_duplicates self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, table_path, 0, retries=0), timeout_seconds=200) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:83: in upsert_until_overload assert time.time() <= deadline, "deadline exceeded" E AssertionError: deadline exceeded E assert 1746262148.7538528 <= 1746262147.8769581 E + where 1746262148.7538528 = () E + where = time.time Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test_duplicates.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff ------ FAIL: 2 - GOOD, 1 - FAIL ydb/tests/olap/data_quotas ydb/tests/olap/oom [size:medium] ------ [overlapping_portions.py] chunk ran 1 test (total:153.31s - test:125.33s) [fail] overlapping_portions.py::TestOverlappingPortions::test [default-linux-x86_64-relwithdebinfo] (119.33s) ydb/tests/olap/oom/overlapping_portions.py:103: in test with pytest.raises(ydb.issues.GenericError, match=r'.*cannot allocate memory.*'): E Failed: DID NOT RAISE Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff/overlapping_portions.py.TestOverlappingPortions.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/olap/oom ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 18 tests (total:616.67s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (timeout) duration: 186.35s test_alter_tiering.py::TestAlterTiering::test[many_tables] (good) duration: 142.89s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 121.11s test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 113.11s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 27.03s test_simple.py::TestSimple::test[tablestores] (good) duration: 12.48s test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 2.32s test_simple.py::TestSimple::test_multi[alter_table] (good) duration: 2.12s test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] (good) duration: 2.10s test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 2.05s 6 more tests with 1.71s total duration are not listed. 2 tests were not launched inside chunk. 
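The data_quotas failure above (and the long run of "upsert #N ok ... Quota exceeded False" records earlier in this log) comes from a loop that keeps writing until the server finally reports an overload/quota error, guarded by a wall-clock deadline; the assertion `time.time() <= deadline` is exactly what fired here. A rough sketch of that pattern follows, using a hypothetical do_upsert(i) helper that returns a (result, overloaded) pair; it is not the real test_quota_exhaustion.py code.

    import time

    def upsert_until_overload(do_upsert, timeout_seconds=200):
        """Call `do_upsert(i)` repeatedly until it reports an overload
        ("quota exceeded") condition, or fail once the deadline passes.
        Sketch only: `do_upsert` is an assumed helper."""
        deadline = time.time() + timeout_seconds
        i = 0
        while True:
            assert time.time() <= deadline, "deadline exceeded"  # the assert seen in the failure above
            result, overloaded = do_upsert(i)
            print(f"upsert #{i} ok, result: {result} Quota exceeded {overloaded}")
            if overloaded:
                print("upsert: got overload issue")
                return i
            i += 1
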
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_alter_compression.py::TestAlterCompression::test[alter_compression] [default-linux-x86_64-relwithdebinfo] (186.35s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 15 - GOOD, 2 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:609.15s - test:600.01s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 607.09s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (607.09s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 2 tests (total:211.97s - test:211.92s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.7G (9162780K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 205170 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 205217 32.5M 20.6M 8.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 205222 169M 170M 117M └─ ydb-tests-stress-kv-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 205535 1.0G 1.0G 802M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205537 952M 953M 713M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205538 967M 968M 729M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205540 917M 919M 677M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205541 933M 935M 697M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205542 897M 898M 681M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205543 978M 979M 740M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205544 989M 990M 750M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 205545 975M 976M 737M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 237625 41.7M 43.8M 12.2M └─ ydb --verbose --endpoint grpc://localhost:23725 --database=/Root workload kv run mixed --seconds 100 --threads 10 --cols 5 --len 200 --int-cols 2 --key-cols 3 --pa Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/stderr ydb/services/ydb/ut [size:medium] nchunks:60 ------ [30/60] chunk ran 5 tests (total:23.89s - test:23.79s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-relwithdebinfo] (0.87s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:14 Get at /-S/util/generic/ptr.h:595:16 ~TStringBuilder at /-S/util/string/builder.h:8:11 NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:640:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out ------ FAIL: 287 - GOOD, 1 - FAIL ydb/services/ydb/ut Total 86 suites: 80 - GOOD 4 - FAIL 2 - TIMEOUT Total 1146 tests: 1136 - GOOD 4 - FAIL 2 - NOT_LAUNCHED 2 - TIMEOUT 2 - SKIPPED Cache efficiency ratio is 98.85% (35421 of 35833). Local: 0 (0.00%), dist: 2130 (5.94%), by dynamic uids: 0 (0.00%), avoided: 33291 (92.91%) Dist cache download: count=2117, size=9.99 GiB, speed=160.69 MiB/s Disk usage for tools/sdk at least 31.52 MiB Additional disk space consumed for build cache 19.74 GiB Critical path: [ 600 ms] [CF] [20f9C3Iev1TOpW73h2_hFg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 0 (1746261740734), finished: 600 (1746261741334)] [ 1180 ms] [CC] [GqENgENKCpXvzmkkepKrNw default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 33936 (1746261774670), finished: 35116 (1746261775850)] [ 168 ms] [AR] [I_WBwUTtgQjTzWqSlSKJPg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/liblibrary-cpp-build_info.a [started: 37290 (1746261778024), finished: 37458 (1746261778192)] [ 7316 ms] [LD] [SErxy4U2vTWhxflFDEToXA default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 40978 (1746261781712), finished: 48294 (1746261789028)] [609535 ms] [TM] [rnd-9122185917897818427 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 163200 (1746261903934), finished: 772735 (1746262513469)] [ 23300 ms] [TA] [rnd-sha9kk4er04lwxl6]: $(BUILD_ROOT)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} [started: 772749 (1746262513483), finished: 796049 (1746262536783)] Time from start: 899205.2368164062 ms, time elapsed by graph 642099 ms, time diff 257106.23681640625 ms. 
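The cache-efficiency figure above decomposes exactly into dist-cache downloads plus avoided (already up-to-date) results; a quick check of the printed numbers, assuming that is how ya aggregates them:

    # Numbers copied from the report above.
    total_results = 35833
    dist_hits = 2130      # fetched from the bazel-remote dist cache
    avoided = 33291       # results that were already up to date

    cached = dist_hits + avoided                                  # 35421, as printed
    print(f"cache efficiency: {cached / total_results:.2%}")      # ~98.85%
    print(f"dist: {dist_hits / total_results:.2%}, avoided: {avoided / total_results:.2%}")  # ~5.94% / ~92.91%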
The longest 10 tasks: [617009 ms] [TM] [rnd-cwp1ckjpp03nkci7 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746262000891, finished: 1746262617900] [609535 ms] [TM] [rnd-9122185917897818427 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746261903934, finished: 1746262513469] [575622 ms] [TM] [rnd-10949490563858526940 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746261936744, finished: 1746262512366] [493427 ms] [TM] [rnd-5649366734694496169 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/ydb_cli/py3test [started: 1746261793698, finished: 1746262287125] [442156 ms] [TM] [rnd-zi78av2fbtuhz399 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/column_family/compression/py3test [started: 1746262015775, finished: 1746262457931] [400736 ms] [TM] [rnd-11609013792520773034 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746261851516, finished: 1746262252252] [324455 ms] [TM] [rnd-13895423918822930110 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746261790184, finished: 1746262114639] [316962 ms] [TM] [rnd-9uiev2q3au2d0aep default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 1746262015947, finished: 1746262332909] [291400 ms] [TM] [rnd-384562406360217433 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/tpc/medium/py3test [started: 1746262020588, finished: 1746262311988] [219861 ms] [TM] [rnd-14543428576282422341 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746261941449, finished: 1746262161310] Total time by type: [17358854 ms] [TM] [count: 310, ave time 55996.30 msec] [ 5982050 ms] [prepare:get from dist cache] [count: 2130, ave time 2808.47 msec] [ 502596 ms] [prepare:tools] [count: 18, ave time 27922.00 msec] [ 379914 ms] [TS] [count: 61, ave time 6228.10 msec] [ 123648 ms] [prepare:bazel-store] [count: 3, ave time 41216.00 msec] [ 76211 ms] [TA] [count: 16, ave time 4763.19 msec] [ 54840 ms] [prepare:AC] [count: 4, ave time 13710.00 msec] [ 53294 ms] [LD] [count: 7, ave time 7613.43 msec] [ 46022 ms] [prepare:put into local cache, clean build dir] [count: 2141, ave time 21.50 msec] [ 22052 ms] [PY] [count: 1, ave time 22052.00 msec] [ 14665 ms] [CC] [count: 6, ave time 2444.17 msec] [ 1262 ms] [AR] [count: 5, ave time 252.40 msec] [ 1198 ms] [CF] [count: 2, ave time 599.00 msec] [ 465 ms] [BI] [count: 1, ave time 465.00 msec] [ 406 ms] [prepare:put to dist cache] [count: 11, ave time 36.91 msec] [ 317 ms] [ld] [count: 1, ave time 317.00 msec] [ 82 ms] [CP] [count: 1, ave time 82.00 msec] [ 67 ms] [prepare:clean] [count: 3, ave time 22.33 msec] [ 56 ms] [BN] [count: 1, ave time 56.00 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 17814979 ms (99.48%) Total run tasks time - 17908370 ms Configure time - 21.0 s Statistics overhead 912 ms Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json Ok + echo 0 + ./ya make . 
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.C9chXT6OHK --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-relwithdebinfo Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 2.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a | 5.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a | 6.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool | 8.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |12.6%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |13.4%| PREPARE $(JDK_DEFAULT-472926544) |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |14.8%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |15.5%| PREPARE $(YMAKE_PYTHON3-4256832079) |16.2%| PREPARE $(LLD_ROOT-3808007503) |16.9%| PREPARE $(CLANG-1922233694) |17.6%| PREPARE $(CLANG_FORMAT-1286082657) |18.3%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |19.0%| PREPARE $(FLAKE8_PY3-715603131) |19.7%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |20.4%| PREPARE $(JDK17-472926544) |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |21.8%| PREPARE $(PYTHON) |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |23.2%| PREPARE $(GDB) |23.9%| PREPARE $(CLANG-874354456) |24.6%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |25.4%| PREPARE $(WITH_JDK17-sbr:7832760150) |26.1%| PREPARE $(WITH_JDK-sbr:7832760150) |27.5%| PREPARE $(CLANG16-1380963495) |28.2%| PREPARE $(CLANG18-1866954364) |28.9%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |33.1%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |32.9%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |33.6%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |34.2%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |34.9%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |36.2%| COMPACTING CACHE 19.7GiB |36.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |37.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |38.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |38.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/ydb/ut/unittest |39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |40.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |40.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |41.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |42.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |43.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |43.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |44.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |45.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |45.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |46.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |47.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |47.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |48.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |49.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |49.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |50.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |51.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |51.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |52.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |59.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |67.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |68.5%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |71.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |71.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |75.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> overlapping_portions.py::TestOverlappingPortions::test >> ttl_unavailable_s3.py::TestUnavailableS3::test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> data_correctness.py::TestDataCorrectness::test |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> unstable_connection.py::TestUnstableConnection::test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete >> test_workload.py::TestYdbKvWorkload::test[row] |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> zip_bomb.py::TestZipBomb::test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-05-03T09:24:20.867966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500147887947844374:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T09:24:20.868026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/t3bx/000034/r3tmp/tmpKCifR8/pdisk_1.dat 2025-05-03T09:24:20.914991Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18857, node 1 2025-05-03T09:24:20.937780Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T09:24:20.937795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T09:24:20.937797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T09:24:20.937848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T09:24:20.967783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-03T09:24:20.968228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T09:24:20.968260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T09:24:20.969669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-03T09:24:20.996174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:60070" , at schemeshard: 72057594046644480 2025-05-03T09:24:20.996270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-03T09:24:20.996282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-05-03T09:24:20.997124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-03T09:24:20.997151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-05-03T09:24:20.997529Z node 1 :TX_PROXY ERROR: Actor# [1:7500147887947845273:2589] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1634487C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x164B5259) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+2476 (0x161C2E2C) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x161D3E17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x164B710E) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x161D37DD) NUnitTest::TTestFactory::Execute()+803 (0x164B7883) NUnitTest::RunMain(int, char**)+3021 (0x164C942D) ??+0 (0x7F95FC840D90) __libc_start_main+128 (0x7F95FC840E40) _start+41 (0x14EAE029) |85.2%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> zip_bomb.py::TestZipBomb::test [GOOD] >> test_log_scenario.py::TestLogScenario::test[180] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000062/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000062/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263416 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000066/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000066/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to 
get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263003 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> unstable_connection.py::TestUnstableConnection::test [GOOD] |88.6%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/00005e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/00005e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263249 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.9%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/00005f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/00005f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263429 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000061/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000061/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, 
portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263296 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |92.6%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, 
result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert #99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert: got overload issue |94.0%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_log_scenario.py::TestLogScenario::test[180] [GOOD] >> test_log_scenario.py::TestLogScenario::test[1051200] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000064/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000064/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 263015 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-03 09:34:20,817 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 09:34:20,884 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
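The traceback later in this report shows the test was sleeping inside a wait_for() polling helper (ydb/tests/olap/ttl_tiering/base.py) when the 600 s wrapper budget expired. A hedged sketch of such a bounded polling loop; the signature, names, and return convention are assumptions, only the one-second sleep is visible in the log:

    import time

    # Sketch of a bounded polling helper in the spirit of base.py's wait_for();
    # names, signature and return convention are assumptions.
    def wait_for(predicate, timeout_s, step_s=1.0):
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            if predicate():
                return True
            time.sleep(step_s)
        return False   # let the caller fail fast instead of eating the wrapper budget

    # e.g. assert wait_for(lambda: rows_migrated(), timeout_s=120), "migration did not finish"

A bound well below the 600 s wrapper limit would turn this kind of hang into an ordinary assertion failure with a usable message instead of a chunk-level timeout.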
Process tree before termination: pid rss ref pdirt 259273 169M 173M 114M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/t3bx/000060/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 259919 966M 967M 705M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1 263600 114M 114M 86.3M └─ moto_server s3 --port 16016 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in 
call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000060/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000060', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000060/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', 
'--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000060/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000060', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000060/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000060/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |96.6%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables] 2025-05-03 09:34:21,318 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 09:34:21,458 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
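The process trees printed before termination (the one that follows, and the kv-stress table further above that tripped the 8 GiB warning) report per-process RSS for the harness, the ydbd servers, and helpers such as moto_server. A rough sketch of how a tree-wide figure like that could be collected, assuming psutil is available in the environment (an illustration, not the harness's actual accounting):

    import psutil  # assumption: psutil (or any process-tree walker) is available

    def tree_rss_bytes(root_pid: int) -> int:
        """Sum resident set size over a process and all of its descendants."""
        root = psutil.Process(root_pid)
        return sum(p.memory_info().rss for p in [root, *root.children(recursive=True)])

    # For the kv stress chunk above, summing the nine ydbd servers plus the CLI under
    # the test_tool pid is what pushes usage past the 8 GiB default, e.g.:
    # print(f"{tree_rss_bytes(205170) / 2**30:.1f} GiB")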
Process tree before termination: pid rss ref pdirt 259438 4.2G 4.2G 4.1G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/t3bx/000044/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 272922 1.7G 1.7G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alte Test command err: test_suffix, num 0, table path read_data_during_bulk_upsert0 start_time 1746264265.1367772 test_suffix, num 1, table path read_data_during_bulk_upsert1 start_time 1746264265.1371512 test_suffix, num 2, table path read_data_during_bulk_upsert2 start_time 1746264265.1382804 test_suffix, num 3, table path read_data_during_bulk_upsert3 start_time 1746264265.1402311 test_suffix, num 4, table path read_data_during_bulk_upsert4 start_time 1746264265.141561 test_suffix, num 5, table path read_data_during_bulk_upsert5 start_time 1746264265.1418846 test_suffix, num 6, table path read_data_during_bulk_upsert6 start_time 1746264265.1459546 test_suffix, num 7, table path read_data_during_bulk_upsert7 start_time 1746264265.1473405 test_suffix, num 8, table path read_data_during_bulk_upsert8 start_time 1746264265.148227 test_suffix, num 9, table path read_data_during_bulk_upsert9 start_time 1746264265.1501365 Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert0 removed test_suffix, num 0, table path read_data_during_bulk_upsert start_time 1746264368.3151696 Path read_data_during_bulk_upsert removed Path read_data_during_bulk_upsert removed test_suffix, num 0, table path read_update_write_load start_time 1746264470.3240337 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 1229 30% 1229 50% 1229 90% 1229 99% 1229 ms Write: 10% 606 30% 606 50% 606 90% 606 99% 606 ms Write: 10% 6065 30% 6065 50% 6065 90% 6065 99% 6065 ms Write: 10% 6413 30% 6413 50% 6413 90% 6413 99% 6413 ms Write: 10% 5744 30% 5744 50% 5744 90% 5744 99% 5744 ms Write: 10% 5893 30% 5893 50% 5893 90% 5893 99% 5893 ms Write: 10% 5658 30% 5658 50% 5658 90% 5658 99% 5658 ms Write: 10% 5541 30% 5541 50% 5541 90% 5541 99% 5541 ms Write: 10% 5379 30% 5379 50% 5379 90% 5379 99% 5379 ms Write: 10% 5258 30% 5258 50% 5258 90% 5258 99% 5258 ms Write: 10% 5290 30% 5290 50% 5290 90% 5290 99% 5290 ms Write: 10% 4750 30% 4750 50% 4750 90% 4750 99% 4750 ms Write: 10% 4842 30% 4842 50% 4842 90% 4842 99% 4842 ms Write: 10% 4573 30% 4573 50% 4573 90% 4573 99% 4573 ms Write: 10% 5157 30% 5157 50% 5157 90% 5157 99% 5157 ms Write: 10% 4923 30% 4923 50% 4923 90% 4923 99% 4923 ms Write: 10% 4406 30% 4406 50% 4406 90% 4406 99% 4406 ms Write: 10% 4520 30% 4520 50% 4520 90% 4520 99% 4520 ms Write: 10% 4535 30% 4535 50% 4535 90% 4535 99% 4535 ms Write: 10% 3810 30% 3810 50% 3810 90% 3810 99% 3810 ms Write: 10% 4051 30% 4051 50% 4051 90% 4051 99% 4051 ms Write: 10% 4255 30% 4255 50% 4255 90% 4255 99% 4255 ms Write: 10% 3764 30% 3764 50% 3764 90% 3764 99% 3764 ms Write: 10% 3729 30% 3729 50% 3729 90% 3729 99% 3729 ms Write: 10% 3546 30% 3546 50% 3546 90% 3546 99% 3546 ms Write: 10% 3642 30% 3642 50% 3642 90% 3642 99% 3642 ms Write: 10% 3624 30% 3624 50% 3624 90% 3624 99% 3624 ms Write: 10% 3488 30% 3488 50% 3488 90% 3488 99% 3488 ms Write: 10% 3564 30% 3564 50% 3564 90% 3564 99% 3564 ms Write: 10% 3539 30% 3539 50% 3539 90% 3539 99% 3539 ms Write: 10% 3467 30% 3467 50% 3467 90% 3467 99% 3467 ms Write: 10% 3534 30% 3534 50% 3534 90% 3534 99% 3534 ms Write: 10% 3226 30% 3226 50% 3226 90% 3226 99% 3226 ms Write: 10% 3416 30% 3416 50% 3416 90% 3416 99% 3416 ms Write: 10% 3112 30% 3112 50% 3112 90% 3112 99% 3112 ms Write: 10% 3193 30% 3193 50% 3193 90% 3193 99% 3193 ms Write: 10% 3090 30% 3090 50% 3090 90% 3090 99% 3090 ms Write: 10% 2913 30% 2913 50% 2913 90% 2913 99% 2913 ms Write: 10% 2930 30% 2930 50% 2930 90% 2930 99% 2930 ms Write: 10% 2912 30% 2912 50% 2912 90% 2912 99% 2912 ms Write: 10% 2795 30% 2795 50% 2795 90% 2795 99% 2795 ms Write: 10% 2807 30% 2807 50% 2807 90% 2807 99% 2807 ms Write: 10% 2680 30% 2680 50% 2680 90% 2680 99% 2680 ms Write: 10% 2331 30% 2331 50% 2331 90% 2331 99% 2331 ms Write: 10% 2560 30% 2560 50% 2560 90% 2560 99% 2560 ms Write: 10% 2078 30% 2078 50% 2078 90% 2078 99% 2078 ms Write: 10% 2112 30% 2112 50% 2112 90% 2112 99% 2112 ms Write: 10% 2130 30% 2130 50% 2130 90% 2130 99% 2130 ms Write: 10% 1944 30% 1944 50% 1944 90% 1944 99% 1944 ms Write: 10% 2110 30% 2110 50% 2110 90% 2110 99% 2110 ms Write: 10% 1662 30% 1662 50% 1662 90% 1662 99% 1662 ms Write: 10% 1589 30% 1589 50% 1589 90% 1589 99% 1589 ms Write: 10% 1725 30% 1725 50% 1725 90% 1725 99% 1725 ms Write: 10% 1445 30% 1445 50% 1445 90% 1445 99% 1445 ms Write: 10% 1880 30% 1880 50% 1880 90% 1880 99% 1880 ms Write: 10% 1420 30% 1420 50% 1420 90% 1420 99% 1420 ms Write: 10% 1342 30% 1342 50% 1342 90% 1342 99% 1342 ms Write: 10% 1397 30% 1397 50% 1397 90% 1397 99% 1397 ms Write: 10% 1384 30% 1384 50% 1384 90% 1384 99% 1384 ms Write: 10% 1300 30% 1300 50% 1300 90% 1300 99% 1300 ms Write: 10% 1288 30% 1288 50% 1288 90% 1288 99% 1288 ms Write: 10% 1255 30% 1255 50% 1255 90% 1255 99% 1255 ms Write: 10% 975 30% 975 50% 975 90% 975 99% 975 ms Write: 10% 1123 30% 1123 50% 1123 90% 1123 99% 1123 ms Step 2. 
read write Write: 10% 5963 30% 5963 50% 5963 90% 5963 99% 5963 ms Write: 10% 6836 30% 6836 50% 6836 90% 6836 99% 6836 ms Write: 10% 6407 30% 6407 50% 6407 90% 6407 99% 6407 ms Write: 10% 6616 30% 6616 50% 6616 90% 6616 99% 6616 ms Write: 10% 6535 30% 6535 50% 6535 90% 6535 99% 6535 ms Write: 10% 6641 30% 6641 50% 6641 90% 6641 99% 6641 ms Write: 10% 6499 30% 6499 50% 6499 90% 6499 99% 6499 ms Write: 10% 6596 30% 6596 50% 6596 90% 6596 99% 6596 ms Write: 10% 6536 30% 6536 50% 6536 90% 6536 99% 6536 ms Write: 10% 6522 30% 6522 50% 6522 90% 6522 99% 6522 ms Write: 10% 6364 30% 6364 50% 6364 90% 6364 99% 6364 ms Write: 10% 6366 30% 6366 50% 6366 90% 6366 99% 6366 ms Write: 10% 6363 30% 6363 50% 6363 90% 6363 99% 6363 ms Write: 10% 6163 30% 6163 50% 6163 90% 6163 99% 6163 ms Write: 10% 6346 30% 6346 50% 6346 90% 6346 99% 6346 ms Write: 10% 6062 30% 6062 50% 6062 90% 6062 99% 6062 ms Write: 10% 5694 30% 5694 50% 5694 90% 5694 99% 5694 ms Write: 10% 5713 30% 5713 50% 5713 90% 5713 99% 5713 ms Write: 10% 5624 30% 5624 50% 5624 90% 5624 99% 5624 ms Write: 10% 5601 30% 5601 50% 5601 90% 5601 99% 5601 ms Write: 10% 5426 30% 5426 50% 5426 90% 5426 99% 5426 ms Write: 10% 5151 30% 5151 50% 5151 90% 5151 99% 5151 ms Write: 10% 5167 30% 5167 50% 5167 90% 5167 99% 5167 ms Write: 10% 4663 30% 4663 50% 4663 90% 4663 99% 4663 ms Write: 10% 4878 30% 4878 50% 4878 90% 4878 99% 4878 ms Write: 10% 4540 30% 4540 50% 4540 90% 4540 99% 4540 ms Write: 10% 4536 30% 4536 50% 4536 90% 4536 99% 4536 ms Write: 10% 4309 30% 4309 50% 4309 90% 4309 99% 4309 ms Write: 10% 3805 30% 3805 50% 3805 90% 3805 99% 3805 ms Write: 10% 4733 30% 4733 50% 4733 90% 4733 99% 4733 ms Write: 10% 3958 30% 3958 50% 3958 90% 3958 99% 3958 ms Write: 10% 3979 30% 3979 50% 3979 90% 3979 99% 3979 ms Write: 10% 3270 30% 3270 50% 3270 90% 3270 99% 3270 ms Write: 10% 4364 30% 4364 50% 4364 90% 4364 99% 4364 ms Write: 10% 3702 30% 3702 50% 3702 90% 3702 99% 3702 ms Write: 10% 3607 30% 3607 50% 3607 90% 3607 99% 3607 ms Write: 10% 3235 30% 3235 50% 3235 90% 3235 99% 3235 ms Write: 10% 2970 30% 2970 50% 2970 90% 2970 99% 2970 ms Write: 10% 2963 30% 2963 50% 2963 90% 2963 99% 2963 ms Write: 10% 3473 30% 3473 50% 3473 90% 3473 99% 3473 ms Write: 10% 2562 30% 2562 50% 2562 90% 2562 99% 2562 ms Write: 10% 2634 30% 2634 50% 2634 90% 2634 99% 2634 ms Write: 10% 2504 30% 2504 50% 2504 90% 2504 99% 2504 ms Write: 10% 1994 30% 1994 50% 1994 90% 1994 99% 1994 ms Write: 10% 2409 30% 2409 50% 2409 90% 2409 99% 2409 ms Write: 10% 1480 30% 1480 50% 1480 90% 1480 99% 1480 ms Write: 10% 1664 30% 1664 50% 1664 90% 1664 99% 1664 ms Write: 10% 1526 30% 1526 50% 1526 90% 1526 99% 1526 ms Write: 10% 3038 30% 3038 50% 3038 90% 3038 99% 3038 ms Write: 10% 1489 30% 1489 50% 1489 90% 1489 99% 1489 ms Write: 10% 1245 30% 1245 50% 1245 90% 1245 99% 1245 ms Write: 10% 1382 30% 1382 50% 1382 90% 1382 99% 1382 ms Write: 10% 2054 30% 2054 50% 2054 90% 2054 99% 2054 ms Write: 10% 1215 30% 1215 50% 1215 90% 1215 99% 1215 ms Write: 10% 1164 30% 1164 50% 1164 90% 1164 99% 1164 ms Write: 10% 1117 30% 1117 50% 1117 90% 1117 99% 1117 ms Write: 10% 1749 30% 1749 50% 1749 90% 1749 99% 1749 ms Write: 10% 1514 30% 1514 50% 1514 90% 1514 99% 1514 ms Write: 10% 1275 30% 1275 50% 1275 90% 1275 99% 1275 ms Write: 10% 3020 30% 3020 50% 3020 90% 3020 99% 3020 ms Write: 10% 2819 30% 2819 50% 2819 90% 2819 99% 2819 ms Write: 10% 1183 30% 1183 50% 1183 90% 1183 99% 1183 ms Write: 10% 4377 30% 4377 50% 4377 90% 4377 99% 4377 ms Write: 10% 3629 30% 3629 50% 3629 90% 3629 99% 3629 ms Read: 10% 11187 30% 
11187 50% 11187 90% 11187 99% 11187 ms Step 3. write modify Write: 10% 7037 30% 7037 50% 7037 90% 7037 99% 7037 ms Write: 10% 8154 30% 8154 50% 8154 90% 8154 99% 8154 ms Write: 10% 7702 30% 7702 50% 7702 90% 7702 99% 7702 ms Write: 10% 8015 30% 8015 50% 8015 90% 8015 99% 8015 ms Write: 10% 7583 30% 7583 50% 7583 90% 7583 99% 7583 ms Write: 10% 7750 30% 7750 50% 7750 90% 7750 99% 7750 ms Write: 10% 7706 30% 7706 50% 7706 90% 7706 99% 7706 ms Write: 10% 7301 30% 7301 50% 7301 90% 7301 99% 7301 ms Write: 10% 7298 30% 7298 50% 7298 90% 7298 99% 7298 ms Write: 10% 7317 30% 7317 50% 7317 90% 7317 99% 7317 ms Write: 10% 7345 30% 7345 50% 7345 90% 7345 99% 7345 ms Write: 10% 7067 30% 7067 50% 7067 90% 7067 99% 7067 ms Write: 10% 6691 30% 6691 50% 6691 90% 6691 99% 6691 ms Write: 10% 7026 30% 7026 50% 7026 90% 7026 99% 7026 ms Write: 10% 6655 30% 6655 50% 6655 90% 6655 99% 6655 ms Write: 10% 6935 30% 6935 50% 6935 90% 6935 99% 6935 ms Write: 10% 6680 30% 6680 50% 6680 90% 6680 99% 6680 ms Write: 10% 6696 30% 6696 50% 6696 90% 6696 99% 6696 ms Write: 10% 6565 30% 6565 50% 6565 90% 6565 99% 6565 ms Write: 10% 6344 30% 6344 50% 6344 90% 6344 99% 6344 ms Write: 10% 6312 30% 6312 50% 6312 90% 6312 99% 6312 ms Write: 10% 6538 30% 6538 50% 6538 90% 6538 99% 6538 ms Write: 10% 6458 30% 6458 50% 6458 90% 6458 99% 6458 ms Write: 10% 6269 30% 6269 50% 6269 90% 6269 99% 6269 ms Write: 10% 6018 30% 6018 50% 6018 90% 6018 99% 6018 ms Write: 10% 6325 30% 6325 50% 6325 90% 6325 99% 6325 ms Write: 10% 5746 30% 5746 50% 5746 90% 5746 99% 5746 ms Write: 10% 6010 30% 6010 50% 6010 90% 6010 99% 6010 ms Write: 10% 5511 30% 5511 50% 5511 90% 5511 99% 5511 ms Write: 10% 5560 30% 5560 50% 5560 90% 5560 99% 5560 ms Write: 10% 5572 30% 5572 50% 5572 90% 5572 99% 5572 ms Write: 10% 5558 30% 5558 50% 5558 90% 5558 99% 5558 ms Write: 10% 5430 30% 5430 50% 5430 90% 5430 99% 5430 ms Write: 10% 5458 30% 5458 50% 5458 90% 5458 99% 5458 ms Write: 10% 5359 30% 5359 50% 5359 90% 5359 99% 5359 ms Write: 10% 4940 30% 4940 50% 4940 90% 4940 99% 4940 ms Write: 10% 5344 30% 5344 50% 5344 90% 5344 99% 5344 ms Write: 10% 4829 30% 4829 50% 4829 90% 4829 99% 4829 ms Write: 10% 4999 30% 4999 50% 4999 90% 4999 99% 4999 ms Write: 10% 5281 30% 5281 50% 5281 90% 5281 99% 5281 ms Write: 10% 4939 30% 4939 50% 4939 90% 4939 99% 4939 ms Write: 10% 4978 30% 4978 50% 4978 90% 4978 99% 4978 ms Write: 10% 4740 30% 4740 50% 4740 90% 4740 99% 4740 ms Write: 10% 4786 30% 4786 50% 4786 90% 4786 99% 4786 ms Write: 10% 4641 30% 4641 50% 4641 90% 4641 99% 4641 ms Write: 10% 4534 30% 4534 50% 4534 90% 4534 99% 4534 ms Write: 10% 4486 30% 4486 50% 4486 90% 4486 99% 4486 ms Write: 10% 4625 30% 4625 50% 4625 90% 4625 99% 4625 ms Write: 10% 3702 30% 3702 50% 3702 90% 3702 99% 3702 ms Write: 10% 4556 30% 4556 50% 4556 90% 4556 99% 4556 ms W ... 
ms Write: 10% 3396 30% 3396 50% 3396 90% 3396 99% 3396 ms Write: 10% 3331 30% 3331 50% 3331 90% 3331 99% 3331 ms Write: 10% 3073 30% 3073 50% 3073 90% 3073 99% 3073 ms Write: 10% 3479 30% 3479 50% 3479 90% 3479 99% 3479 ms Write: 10% 3161 30% 3161 50% 3161 90% 3161 99% 3161 ms Write: 10% 3076 30% 3076 50% 3076 90% 3076 99% 3076 ms Write: 10% 3057 30% 3057 50% 3057 90% 3057 99% 3057 ms Update: 10% 724 30% 724 50% 724 90% 724 99% 724 ms Read: 10% 12963 30% 12963 50% 12963 90% 12963 99% 12963 ms Path read_update_write_load removed test_suffix, num 0, table path alter_table start_time 1746264529.1761336 Path alter_table removed Path alter_table removed test_suffix, num 0, table path alter_tablestore start_time 1746264529.7747555 Path alter_tablestore removed Path alter_tablestore removed test_suffix, num 0, table path table start_time 1746264530.3596818 Path table removed Path table removed test_suffix, num 0, table path tablestores start_time 1746264530.7609577 Path tablestores removed Path tablestores removed test_suffix, num 0, table path create_and_drop_tables start_time 1746264545.1834252 Path create_and_drop_tables removed Path create_and_drop_tables removed test_suffix, num 0, table path alter_compression start_time 1746264563.514482 Path alter_compression removed Path alter_compression removed test_suffix, num 0, table path many_tables start_time 1746264748.7692442 Path many_tables removed contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
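The "Write/Read/Update: 10% ... 99% ... ms" lines in Steps 1-3 above are latency percentile summaries from the read_update_write_load scenario (apparently one line per worker or sample batch; all percentiles within a line being identical is consistent with a single recorded sample). A small illustrative formatter (a hypothetical helper, not the scenario's actual code) that produces lines in that shape:

    def percentile_line(kind, latencies_ms):
        """Format latencies as e.g. 'Write: 10% 1229 30% 1229 50% 1229 90% 1229 99% 1229 ms'."""
        data = sorted(latencies_ms)
        def pct(p):
            # index by rounded rank; with a single sample every percentile is that sample,
            # matching the identical values seen per line in the log above
            return data[min(len(data) - 1, round(p / 100 * (len(data) - 1)))]
        return f"{kind}: " + " ".join(f"{p}% {pct(p)}" for p in (10, 30, 50, 90, 99)) + " ms"

    print(percentile_line("Write", [1229]))
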
File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return 
self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/t3bx/000044/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0) File "/home/runner/.ya/build/build_root/t3bx/000044/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_tiering.py", line 356, in scenario_many_tables threads.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Caught base exception, num 0 message Graceful shutdown requested contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000044/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000044', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000044/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000044/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000044', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000044/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000044/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |98.0%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TM] {RESULT} ydb/tests/olap/scenario/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> test_log_scenario.py::TestLogScenario::test[1051200] 2025-05-03 09:34:21,150 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 09:34:21,442 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 259428 417M 419M 362M ydb-tests-olap --basetemp /home/runner/.ya/build/build_root/t3bx/000041/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules --ya-tra 266042 11.7G 11.7G 11.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario. 
Test command err: Pid 259974 upsert #0 ok, result: [] upsert #1 ok, result: [] Rss after upsert 636832 [{'column0': b'<x-run>1', 'column1': b'<x-run>2', 'column2': b'<x-run> ... <x-run>5'}] [long runs of 'x' filler bytes elided from the row payload] Max rss {} 3774192 File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return
self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/test_log_scenario.py", line 134, in test logging.info(f"Count rows after insert {self.get_row_count()} before wait") File "ydb/tests/olap/test_log_scenario.py", line 111, in get_row_count return self.ydb_client.query(f"select count(*) as Rows from `{self.table_name}`")[0].rows[0]["Rows"] File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = 
YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/ydb-tests-olap', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000041/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000041', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000041/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/ydb-tests-olap', '--basetemp', '/home/runner/.ya/build/build_root/t3bx/000041/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/t3bx/000041', '--source-root', '/home/runner/.ya/build/build_root/t3bx/000041/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/t3bx/000041/ydb/tests/olap/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |99.3%| [TM] {RESULT} ydb/tests/olap/py3test |99.9%| CLEANING BUILD ROOT ydb/tests/olap [size:medium] ------ sole chunk ran 3 tests (total:670.50s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_log_scenario.py::TestLogScenario::test[1051200] (timeout) duration: 330.56s test_log_scenario.py::TestLogScenario::test[180] (good) duration: 235.15s zip_bomb.py::TestZipBomb::test (good) duration: 41.99s Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.2G (12803096K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 259308 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 259419 33.6M 21.9M 9.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 259428 417M 419M 362M └─ ydb-tests-olap --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 266042 11.7G 11.7G 11.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_sce Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/stderr [timeout] test_log_scenario.py::TestLogScenario::test[1051200] [default-linux-x86_64-relwithdebinfo] (330.56s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario.py.TestLogScenario.test.1051200.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff ------ TIMEOUT: 2 - GOOD, 1 - TIMEOUT ydb/tests/olap ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 18 tests (total:628.83s - test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 183.35s 
test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 139.71s test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 105.21s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 99.95s test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 56.83s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 16.35s test_simple.py::TestSimple::test[tablestores] (good) duration: 12.41s test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 2.05s test_simple.py::TestSimple::test_multi[alter_table] (good) duration: 2.02s test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 2.01s 8 more tests with 5.46s total duration are not listed. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-relwithdebinfo] (139.71s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 17 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:605.15s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 603.82s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (603.82s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 2 tests (total:211.65s - test:211.60s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 10.1G (10574540K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 259323 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 259420 32.6M 20.8M 8.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 259432 184M 185M 132M └─ ydb-tests-stress-kv-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 259772 1.1G 1.1G 916M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259791 1.2G 1.2G 951M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259797 1.2G 1.2G 995M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259804 1.1G 1.1G 868M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259805 1.1G 1.1G 887M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259806 1.0G 1.0G 815M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259807 1.0G 1.0G 813M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259808 1.0G 1.0G 829M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 259809 1.0G 1.0G 806M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t 268845 42.6M 0b 0b └─ ydb --verbose --endpoint grpc://localhost:63331 --database=/Root workload kv run mixed --seconds 100 --threads 10 --cols 5 --len 200 --int-cols 2 --key-cols 3 --pa Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/stderr ydb/services/ydb/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:3.85s - test:3.82s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-relwithdebinfo] (0.89s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:14 Get at /-S/util/generic/ptr.h:595:16 ~TStringBuilder at /-S/util/string/builder.h:8:11 NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:640:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out ------ FAIL: 1 - FAIL ydb/services/ydb/ut Total 7 suites: 3 - GOOD 1 - FAIL 3 - TIMEOUT Total 34 tests: 30 - GOOD 1 - FAIL 3 - TIMEOUT Cache efficiency ratio is 99.75% (35168 of 35256). Local: 17 (0.05%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 35151 (99.70%) Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s Disk usage for tools/sdk 3.64 GiB Additional disk space consumed for build cache 0 bytes Critical path: [670861 ms] [TM] [rnd-2ah8uy6v4suh940g default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 0 (1746264260832), finished: 670861 (1746264931693)] Time from start: 679777.2399902344 ms, time elapsed by graph 670861 ms, time diff 8916.239990234375 ms. 
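Two chunk summaries above report runs exceeding the 8.0G memory limit (12.2G used in ydb/tests/olap, 10.1G in the kv stress chunk) and point at REQUIREMENTS(ram:X) in the ya.make as the way to raise a test's RAM requirement. A hedged sketch of what such a declaration typically looks like; the surrounding macros and the value 16 are illustrative, not taken from the actual files:

    # illustrative ya.make fragment, not the real file contents
    PY3TEST()
    SIZE(MEDIUM)
    REQUIREMENTS(ram:16)  # raise the per-test RAM requirement above the reported 8G default
    END()
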
The longest 10 tasks: [670861 ms] [TM] [rnd-2ah8uy6v4suh940g default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 1746264260832, finished: 1746264931693] [631577 ms] [TM] [rnd-qwxya77ju2ydclb5 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746264260834, finished: 1746264892411] [605560 ms] [TM] [rnd-14744795607544946834 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746264260425, finished: 1746264865985] [391775 ms] [TM] [rnd-11981991477194847247 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746264260350, finished: 1746264652125] [266640 ms] [TM] [rnd-7442883420337607911 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746264260654, finished: 1746264527294] [211961 ms] [TM] [rnd-4vyio2lwkjf43239 default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/kv/tests/py3test [started: 1746264260857, finished: 1746264472818] [191371 ms] [TM] [rnd-1814735483908157931 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746264260420, finished: 1746264451791] [171303 ms] [TM] [rnd-14735676724655873601 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746264260429, finished: 1746264431732] [148975 ms] [TM] [rnd-12220170491498265436 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746264260440, finished: 1746264409415] [128754 ms] [TM] [rnd-12694163686404855041 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/oom/py3test [started: 1746264260437, finished: 1746264389191] Total time by type: [3742211 ms] [TM] [count: 84, ave time 44550.13 msec] [ 37966 ms] [TA] [count: 4, ave time 9491.50 msec] [ 2669 ms] [prepare:bazel-store] [count: 1, ave time 2669.00 msec] [ 1126 ms] [prepare:get from local cache] [count: 17, ave time 66.24 msec] [ 755 ms] [prepare:tools] [count: 16, ave time 47.19 msec] [ 242 ms] [prepare:AC] [count: 2, ave time 121.00 msec] [ 100 ms] [prepare:put to dist cache] [count: 9, ave time 11.11 msec] [ 10 ms] [prepare:clean] [count: 3, ave time 3.33 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 3780177 ms (100.00%) Total run tasks time - 3780177 ms Configure time - 13.2 s Statistics overhead 753 ms Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json Ok + echo 0 + ./ya make . 
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.C9chXT6OHK --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-relwithdebinfo Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 2.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd | 6.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool | 9.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |12.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |13.6%| PREPARE $(CLANG-874354456) |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |15.2%| PREPARE $(CLANG16-1380963495) |15.9%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |16.7%| PREPARE $(CLANG_FORMAT-1286082657) |17.4%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |18.2%| PREPARE $(WITH_JDK-sbr:7832760150) |18.9%| PREPARE $(CLANG18-1866954364) |19.7%| PREPARE $(PYTHON) |20.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |21.2%| PREPARE $(CLANG-1922233694) |22.0%| PREPARE $(WITH_JDK17-sbr:7832760150) |22.7%| PREPARE $(JDK17-472926544) |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |24.2%| PREPARE $(FLAKE8_PY3-715603131) |25.0%| PREPARE $(GDB) |25.8%| PREPARE $(YMAKE_PYTHON3-4256832079) |26.5%| PREPARE $(JDK_DEFAULT-472926544) |27.3%| PREPARE $(LLD_ROOT-3808007503) |28.0%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |31.1%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |31.8%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |32.6%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |33.3%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |34.1%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |34.8%| COMPACTING CACHE 19.7GiB |35.6%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |36.4%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |40.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |40.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |41.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |42.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/ydb/ut/unittest |43.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |43.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |44.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |45.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |46.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |47.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |47.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |48.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |49.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |50.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |50.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |51.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |52.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |55.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |65.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |70.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |71.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |72.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |72.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |73.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |75.8%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |77.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_unavailable_s3.py::TestUnavailableS3::test >> test_workload.py::TestYdbKvWorkload::test[row] >> unstable_connection.py::TestUnstableConnection::test >> data_correctness.py::TestDataCorrectness::test >> test_log_scenario.py::TestLogScenario::test[180] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-05-03T10:01:08.803944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500157373987903415:2075];send_to=[0:7307199536658146131:7762515]; 2025-05-03T10:01:08.803959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/shpt/000038/r3tmp/tmpc7KRtA/pdisk_1.dat 2025-05-03T10:01:08.849053Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20381, node 1 2025-05-03T10:01:08.865881Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-03T10:01:08.865892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-03T10:01:08.865893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-03T10:01:08.865919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12471 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-03T10:01:08.904446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-03T10:01:08.904475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-03T10:01:08.905946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-03T10:01:08.925556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-03T10:01:08.945263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54002" , at schemeshard: 72057594046644480 2025-05-03T10:01:08.945339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-03T10:01:08.945350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-05-03T10:01:08.946082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-03T10:01:08.946108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 
2025-05-03T10:01:08.946465Z node 1 :TX_PROXY ERROR: Actor# [1:7500157373987904324:2595] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1634487C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x164B5259) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+2476 (0x161C2E2C) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x161D3E17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x164B710E) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x161D37DD) NUnitTest::TTestFactory::Execute()+803 (0x164B7883) NUnitTest::RunMain(int, char**)+3021 (0x164C942D) ??+0 (0x7FA836BBCD90) __libc_start_main+128 (0x7FA836BBCE40) _start+41 (0x14EAE029) |87.9%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> data_correctness.py::TestDataCorrectness::test [GOOD] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000055/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000055/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 281156 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/00005a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/00005a/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 281488 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> 
ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> unstable_connection.py::TestUnstableConnection::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/00005b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/00005b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 281399 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000052/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000052/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 281486 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000056/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000056/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, 
portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 281079 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |93.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_log_scenario.py::TestLogScenario::test[180] [GOOD] >> test_log_scenario.py::TestLogScenario::test[1051200] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000054/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to 
get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000054/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 280785 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_log_scenario.py::TestLogScenario::test[1051200] [FAIL] >> zip_bomb.py::TestZipBomb::test >> zip_bomb.py::TestZipBomb::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-03 10:11:08,650 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-03 10:11:08,700 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 277682 169M 173M 114M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/shpt/000059/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 277860 966M 968M 706M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1 281053 102M 102M 74.1M └─ moto_server s3 --port 24616 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in 
call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/shpt/000059/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/shpt/000059', '--source-root', '/home/runner/.ya/build/build_root/shpt/000059/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', 
'--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/shpt/000059/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/shpt/000059', '--source-root', '/home/runner/.ya/build/build_root/shpt/000059/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/shpt/000059/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> zip_bomb.py::TestZipBomb::test [GOOD] Test command err: Pid 290769 upsert #0 ok, result: [] upsert #1 ok, result: [] Rss after upsert 643712 [{'column0': 
b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx1', 'column1': b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx2', 'column2': 
b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ... 
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx3', 'column3': 
b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
[... truncated data dump: the remaining row values ('column4' and neighbouring columns) are long runs of 'x' filler bytes ...]
Max rss {} 3776864
|97.0%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables]
2025-05-03 10:11:08,831 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-05-03 10:11:09,033 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
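The 600 s wrapper timeout above and the RAM-limit warnings later in this report are both governed by macros in the affected test's ya.make. A minimal sketch of the relevant knobs; the numbers are illustrative only, not what the real targets declare:

    PY3TEST()

    SIZE(MEDIUM)          # medium tests run under the 600 s budget this chunk overran
    TIMEOUT(600)          # can be raised if a scenario legitimately needs more wall time
    REQUIREMENTS(ram:16)  # the "REQUIREMENTS(ram:X)" hint the report prints on memory overuse

    # TEST_SRCS(), DEPENDS() and the rest of the target are omitted here.

    END()

Whether raising these limits or speeding the tests up is the right fix is for the suite owners to decide; the sketch only shows where the limits live.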
Process tree before termination: pid rss ref pdirt 277766 4.4G 4.3G 4.3G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/shpt/00004d/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 290026 1.7G 1.7G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alte Test command err: test_suffix, num 0, table path read_data_during_bulk_upsert0 start_time 1746266472.5290973 test_suffix, num 1, table path read_data_during_bulk_upsert1 start_time 1746266472.5293722 test_suffix, num 2, table path read_data_during_bulk_upsert2 start_time 1746266472.5333345 test_suffix, num 3, table path read_data_during_bulk_upsert3 start_time 1746266472.5356212 test_suffix, num 4, table path read_data_during_bulk_upsert4 start_time 1746266472.5362968 test_suffix, num 5, table path read_data_during_bulk_upsert5 start_time 1746266472.5370162 test_suffix, num 6, table path read_data_during_bulk_upsert6 start_time 1746266472.5384884 test_suffix, num 7, table path read_data_during_bulk_upsert7 start_time 1746266472.538757 test_suffix, num 8, table path read_data_during_bulk_upsert8 start_time 1746266472.542942 test_suffix, num 9, table path read_data_during_bulk_upsert9 start_time 1746266472.5442195 Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert2 removed test_suffix, num 0, table path read_data_during_bulk_upsert start_time 1746266589.230834 Path read_data_during_bulk_upsert removed Path read_data_during_bulk_upsert removed test_suffix, num 0, table path create_and_drop_tables start_time 1746266690.374665 Path create_and_drop_tables removed Path create_and_drop_tables removed test_suffix, num 0, table path alter_compression start_time 1746266706.9124553 Path alter_compression removed Path alter_compression removed test_suffix, num 0, table path read_update_write_load start_time 1746266892.732629 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 2710 30% 2710 50% 2710 90% 2710 99% 2710 ms Write: 10% 3176 30% 3176 50% 3176 90% 3176 99% 3176 ms Write: 10% 5863 30% 5863 50% 5863 90% 5863 99% 5863 ms Write: 10% 5931 30% 5931 50% 5931 90% 5931 99% 5931 ms Write: 10% 6035 30% 6035 50% 6035 90% 6035 99% 6035 ms Write: 10% 5806 30% 5806 50% 5806 90% 5806 99% 5806 ms Write: 10% 6000 30% 6000 50% 6000 90% 6000 99% 6000 ms Write: 10% 5766 30% 5766 50% 5766 90% 5766 99% 5766 ms Write: 10% 5720 30% 5720 50% 5720 90% 5720 99% 5720 ms Write: 10% 5748 30% 5748 50% 5748 90% 5748 99% 5748 ms Write: 10% 5563 30% 5563 50% 5563 90% 5563 99% 5563 ms Write: 10% 5529 30% 5529 50% 5529 90% 5529 99% 5529 ms Write: 10% 5492 30% 5492 50% 5492 90% 5492 99% 5492 ms Write: 10% 5473 30% 5473 50% 5473 90% 5473 99% 5473 ms Write: 10% 5308 30% 5308 50% 5308 90% 5308 99% 5308 ms Write: 10% 5256 30% 5256 50% 5256 90% 5256 99% 5256 ms Write: 10% 4981 30% 4981 50% 4981 90% 4981 99% 4981 ms Write: 10% 5104 30% 5104 50% 5104 90% 5104 99% 5104 ms Write: 10% 4920 30% 4920 50% 4920 90% 4920 99% 4920 ms Write: 10% 4533 30% 4533 50% 4533 90% 4533 99% 4533 ms Write: 10% 4624 30% 4624 50% 4624 90% 4624 99% 4624 ms Write: 10% 4474 30% 4474 50% 4474 90% 4474 99% 4474 ms Write: 10% 4375 30% 4375 50% 4375 90% 4375 99% 4375 ms Write: 10% 4287 30% 4287 50% 4287 90% 4287 99% 4287 ms Write: 10% 4251 30% 4251 50% 4251 90% 4251 99% 4251 ms Write: 10% 4372 30% 4372 50% 4372 90% 4372 99% 4372 ms Write: 10% 3863 30% 3863 50% 3863 90% 3863 99% 3863 ms Write: 10% 3910 30% 3910 50% 3910 90% 3910 99% 3910 ms Write: 10% 3792 30% 3792 50% 3792 90% 3792 99% 3792 ms Write: 10% 3589 30% 3589 50% 3589 90% 3589 99% 3589 ms Write: 10% 3543 30% 3543 50% 3543 90% 3543 99% 3543 ms Write: 10% 3514 30% 3514 50% 3514 90% 3514 99% 3514 ms Write: 10% 3444 30% 3444 50% 3444 90% 3444 99% 3444 ms Write: 10% 3415 30% 3415 50% 3415 90% 3415 99% 3415 ms Write: 10% 3349 30% 3349 50% 3349 90% 3349 99% 3349 ms Write: 10% 3289 30% 3289 50% 3289 90% 3289 99% 3289 ms Write: 10% 3436 30% 3436 50% 3436 90% 3436 99% 3436 ms Write: 10% 3319 30% 3319 50% 3319 90% 3319 99% 3319 ms Write: 10% 2743 30% 2743 50% 2743 90% 2743 99% 2743 ms Write: 10% 2571 30% 2571 50% 2571 90% 2571 99% 2571 ms Write: 10% 2772 30% 2772 50% 2772 90% 2772 99% 2772 ms Write: 10% 2393 30% 2393 50% 2393 90% 2393 99% 2393 ms Write: 10% 2587 30% 2587 50% 2587 90% 2587 99% 2587 ms Write: 10% 2340 30% 2340 50% 2340 90% 2340 99% 2340 ms Write: 10% 2546 30% 2546 50% 2546 90% 2546 99% 2546 ms Write: 10% 2278 30% 2278 50% 2278 90% 2278 99% 2278 ms Write: 10% 2282 30% 2282 50% 2282 90% 2282 99% 2282 ms Write: 10% 4433 30% 4433 50% 4433 90% 4433 99% 4433 ms Write: 10% 2211 30% 2211 50% 2211 90% 2211 99% 2211 ms Write: 10% 1755 30% 1755 50% 1755 90% 1755 99% 1755 ms Write: 10% 1598 30% 1598 50% 1598 90% 1598 99% 1598 ms Write: 10% 1352 30% 1352 50% 1352 90% 1352 99% 1352 ms Write: 10% 1798 30% 1798 50% 1798 90% 1798 99% 1798 ms Write: 10% 1414 30% 1414 50% 1414 90% 1414 99% 1414 ms Write: 10% 1252 30% 1252 50% 1252 90% 1252 99% 1252 ms Write: 10% 2325 30% 2325 50% 2325 90% 2325 99% 2325 ms Write: 10% 1090 30% 1090 50% 1090 90% 1090 99% 1090 ms Write: 10% 1112 30% 1112 50% 1112 90% 1112 99% 1112 ms Write: 10% 1132 30% 1132 50% 1132 90% 1132 99% 1132 ms Write: 10% 1138 30% 1138 50% 1138 90% 1138 99% 1138 ms Write: 10% 977 30% 977 50% 977 90% 977 99% 977 ms Write: 10% 1040 30% 1040 50% 1040 90% 1040 99% 1040 ms Write: 10% 947 30% 947 50% 947 90% 947 99% 947 ms Write: 10% 867 30% 867 50% 867 90% 867 99% 867 ms Step 2. 
read write Write: 10% 62 30% 62 50% 62 90% 62 99% 62 ms Write: 10% 1793 30% 1793 50% 1793 90% 1793 99% 1793 ms Write: 10% 5764 30% 5764 50% 5764 90% 5764 99% 5764 ms Write: 10% 5650 30% 5650 50% 5650 90% 5650 99% 5650 ms Write: 10% 5704 30% 5704 50% 5704 90% 5704 99% 5704 ms Write: 10% 5523 30% 5523 50% 5523 90% 5523 99% 5523 ms Write: 10% 5481 30% 5481 50% 5481 90% 5481 99% 5481 ms Write: 10% 5471 30% 5471 50% 5471 90% 5471 99% 5471 ms Write: 10% 5457 30% 5457 50% 5457 90% 5457 99% 5457 ms Write: 10% 5463 30% 5463 50% 5463 90% 5463 99% 5463 ms Write: 10% 5387 30% 5387 50% 5387 90% 5387 99% 5387 ms Write: 10% 5471 30% 5471 50% 5471 90% 5471 99% 5471 ms Write: 10% 5322 30% 5322 50% 5322 90% 5322 99% 5322 ms Write: 10% 5337 30% 5337 50% 5337 90% 5337 99% 5337 ms Write: 10% 5027 30% 5027 50% 5027 90% 5027 99% 5027 ms Write: 10% 5172 30% 5172 50% 5172 90% 5172 99% 5172 ms Write: 10% 5100 30% 5100 50% 5100 90% 5100 99% 5100 ms Write: 10% 5025 30% 5025 50% 5025 90% 5025 99% 5025 ms Write: 10% 4647 30% 4647 50% 4647 90% 4647 99% 4647 ms Write: 10% 4741 30% 4741 50% 4741 90% 4741 99% 4741 ms Write: 10% 4264 30% 4264 50% 4264 90% 4264 99% 4264 ms Write: 10% 4519 30% 4519 50% 4519 90% 4519 99% 4519 ms Write: 10% 4929 30% 4929 50% 4929 90% 4929 99% 4929 ms Write: 10% 4145 30% 4145 50% 4145 90% 4145 99% 4145 ms Write: 10% 4415 30% 4415 50% 4415 90% 4415 99% 4415 ms Write: 10% 4316 30% 4316 50% 4316 90% 4316 99% 4316 ms Write: 10% 3974 30% 3974 50% 3974 90% 3974 99% 3974 ms Write: 10% 4290 30% 4290 50% 4290 90% 4290 99% 4290 ms Write: 10% 3897 30% 3897 50% 3897 90% 3897 99% 3897 ms Write: 10% 3630 30% 3630 50% 3630 90% 3630 99% 3630 ms Write: 10% 3446 30% 3446 50% 3446 90% 3446 99% 3446 ms Write: 10% 4014 30% 4014 50% 4014 90% 4014 99% 4014 ms Write: 10% 3113 30% 3113 50% 3113 90% 3113 99% 3113 ms Write: 10% 3156 30% 3156 50% 3156 90% 3156 99% 3156 ms Write: 10% 2776 30% 2776 50% 2776 90% 2776 99% 2776 ms Write: 10% 3150 30% 3150 50% 3150 90% 3150 99% 3150 ms Write: 10% 2598 30% 2598 50% 2598 90% 2598 99% 2598 ms Write: 10% 2753 30% 2753 50% 2753 90% 2753 99% 2753 ms Write: 10% 3011 30% 3011 50% 3011 90% 3011 99% 3011 ms Write: 10% 2530 30% 2530 50% 2530 90% 2530 99% 2530 ms Write: 10% 2560 30% 2560 50% 2560 90% 2560 99% 2560 ms Write: 10% 2731 30% 2731 50% 2731 90% 2731 99% 2731 ms Write: 10% 2465 30% 2465 50% 2465 90% 2465 99% 2465 ms Write: 10% 2688 30% 2688 50% 2688 90% 2688 99% 2688 ms Write: 10% 3006 30% 3006 50% 3006 90% 3006 99% 3006 ms Write: 10% 2177 30% 2177 50% 2177 90% 2177 99% 2177 ms Write: 10% 2050 30% 2050 50% 2050 90% 2050 99% 2050 ms Write: 10% 2210 30% 2210 50% 2210 90% 2210 99% 2210 ms Write: 10% 1784 30% 1784 50% 1784 90% 1784 99% 1784 ms Write: 10% 2228 30% 2228 50% 2228 90% 2228 99% 2228 ms Write: 10% 1655 30% 1655 50% 1655 90% 1655 99% 1655 ms Write: 10% 1416 30% 1416 50% 1416 90% 1416 99% 1416 ms Write: 10% 2216 30% 2216 50% 2216 90% 2216 99% 2216 ms Write: 10% 1124 30% 1124 50% 1124 90% 1124 99% 1124 ms Write: 10% 1065 30% 1065 50% 1065 90% 1065 99% 1065 ms Write: 10% 1074 30% 1074 50% 1074 90% 1074 99% 1074 ms Write: 10% 1403 30% 1403 50% 1403 90% 1403 99% 1403 ms Write: 10% 1483 30% 1483 50% 1483 90% 1483 99% 1483 ms Write: 10% 1227 30% 1227 50% 1227 90% 1227 99% 1227 ms Write: 10% 1225 30% 1225 50% 1225 90% 1225 99% 1225 ms Write: 10% 1015 30% 1015 50% 1015 90% 1015 99% 1015 ms Write: 10% 1358 30% 1358 50% 1358 90% 1358 99% 1358 ms Write: 10% 1324 30% 1324 50% 1324 90% 1324 99% 1324 ms Write: 10% 1073 30% 1073 50% 1073 90% 1073 99% 1073 ms Read: 10% 10288 30% 10288 50% 
10288 90% 10288 99% 10288 ms Step 3. write modify Write: 10% 2696 30% 2696 50% 2696 90% 2696 99% 2696 ms Write: 10% 3557 30% 3557 50% 3557 90% 3557 99% 3557 ms Write: 10% 5343 30% 5343 50% 5343 90% 5343 99% 5343 ms Write: 10% 5856 30% 5856 50% 5856 90% 5856 99% 5856 ms Write: 10% 6684 30% 6684 50% 6684 90% 6684 99% 6684 ms Write: 10% 6728 30% 6728 50% 6728 90% 6728 99% 6728 ms Write: 10% 6699 30% 6699 50% 6699 90% 6699 99% 6699 ms Write: 10% 6745 30% 6745 50% 6745 90% 6745 99% 6745 ms Write: 10% 6767 30% 6767 50% 6767 90% 6767 99% 6767 ms Write: 10% 6358 30% 6358 50% 6358 90% 6358 99% 6358 ms Write: 10% 6656 30% 6656 50% 6656 90% 6656 99% 6656 ms Write: 10% 6498 30% 6498 50% 6498 90% 6498 99% 6498 ms Write: 10% 6831 30% 6831 50% 6831 90% 6831 99% 6831 ms Write: 10% 6454 30% 6454 50% 6454 90% 6454 99% 6454 ms Write: 10% 6249 30% 6249 50% 6249 90% 6249 99% 6249 ms Write: 10% 6086 30% 6086 50% 6086 90% 6086 99% 6086 ms Write: 10% 6295 30% 6295 50% 6295 90% 6295 99% 6295 ms Write: 10% 5956 30% 5956 50% 5956 90% 5956 99% 5956 ms Write: 10% 6120 30% 6120 50% 6120 90% 6120 99% 6120 ms Write: 10% 5826 30% 5826 50% 5826 90% 5826 99% 5826 ms Write: 10% 5852 30% 5852 50% 5852 90% 5852 99% 5852 ms Write: 10% 5748 30% 5748 50% 5748 90% 5748 99% 5748 ms Write: 10% 6098 30% 6098 50% 6098 90% 6098 99% 6098 ms Write: 10% 5838 30% 5838 50% 5838 90% 5838 99% 5838 ms Write: 10% 5785 30% 5785 50% 5785 90% 5785 99% 5785 ms Write: 10% 5800 30% 5800 50% 5800 90% 5800 99% 5800 ms Write: 10% 5834 30% 5834 50% 5834 90% 5834 99% 5834 ms Write: 10% 5801 30% 5801 50% 5801 90% 5801 99% 5801 ms Write: 10% 5775 30% 5775 50% 5775 90% 5775 99% 5775 ms Write: 10% 5566 30% 5566 50% 5566 90% 5566 99% 5566 ms Write: 10% 5333 30% 5333 50% 5333 90% 5333 99% 5333 ms Write: 10% 5157 30% 5157 50% 5157 90% 5157 99% 5157 ms Write: 10% 5367 30% 5367 50% 5367 90% 5367 99% 5367 ms Write: 10% 5303 30% 5303 50% 5303 90% 5303 99% 5303 ms Write: 10% 5005 30% 5005 50% 5005 90% 5005 99% 5005 ms Write: 10% 5238 30% 5238 50% 5238 90% 5238 99% 5238 ms Write: 10% 4888 30% 4888 50% 4888 90% 4888 99% 4888 ms Write: 10% 4865 30% 4865 50% 4865 90% 4865 99% 4865 ms Write: 10% 4753 30% 4753 50% 4753 90% 4753 99% 4753 ms Write: 10% 5011 30% 5011 50% 5011 90% 5011 99% 5011 ms Write: 10% 4666 30% 4666 50% 4666 90% 4666 99% 4666 ms Write: 10% 4610 30% 4610 50% 4610 90% 4610 99% 4610 ms Write: 10% 4015 30% 4015 50% 4015 90% 4015 99% 4015 ms Write: 10% 4463 30% 4463 50% 4463 90% 4463 99% 4463 ms Write: 10% 3881 30% 3881 50% 3881 90% 3881 99% 3881 ... 
3912 90% 3912 99% 3912 ms Write: 10% 3739 30% 3739 50% 3739 90% 3739 99% 3739 ms Write: 10% 3957 30% 3957 50% 3957 90% 3957 99% 3957 ms Write: 10% 3711 30% 3711 50% 3711 90% 3711 99% 3711 ms Write: 10% 3744 30% 3744 50% 3744 90% 3744 99% 3744 ms Write: 10% 3621 30% 3621 50% 3621 90% 3621 99% 3621 ms Write: 10% 3535 30% 3535 50% 3535 90% 3535 99% 3535 ms Write: 10% 3670 30% 3670 50% 3670 90% 3670 99% 3670 ms Write: 10% 3357 30% 3357 50% 3357 90% 3357 99% 3357 ms Write: 10% 3470 30% 3470 50% 3470 90% 3470 99% 3470 ms Write: 10% 3475 30% 3475 50% 3475 90% 3475 99% 3475 ms Write: 10% 3295 30% 3295 50% 3295 90% 3295 99% 3295 ms Write: 10% 3292 30% 3292 50% 3292 90% 3292 99% 3292 ms Update: 10% 714 30% 714 50% 714 90% 714 99% 714 ms Read: 10% 12963 30% 12963 50% 12963 90% 12963 99% 12963 ms Path read_update_write_load removed test_suffix, num 0, table path alter_table start_time 1746266948.2631276 Path alter_table removed Path alter_table removed test_suffix, num 0, table path alter_tablestore start_time 1746266948.752907 Path alter_tablestore removed Path alter_tablestore removed test_suffix, num 0, table path table start_time 1746266949.2288752 Path table removed Path table removed test_suffix, num 0, table path tablestores start_time 1746266949.5496707 Path tablestores removed Path tablestores removed test_suffix, num 0, table path many_tables start_time 1746266963.8078136 Path many_tables removed contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
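The load steps above and the shutdown stack that follows both go through the small thread wrapper in ydb/tests/olap/common/thread_helper.py: each worker thread records its target's return value or exception, and join() re-raises the stored exception in the main test thread (that is the "raise self.exc" frame visible in the failure report further down). A minimal sketch of that wrap-and-rejoin pattern, reconstructed from the frame names only and not from the real implementation:

    import threading


    class TestThread(threading.Thread):
        """Thread that remembers its target's result or exception for the joiner."""

        def __init__(self, target, args=()):
            super().__init__(target=target, args=args)
            self.ret = None
            self.exc = None

        def run(self):
            try:
                # mirrors "self.ret = self._target(*self._args, **self._kwargs)" in the trace
                self.ret = self._target(*self._args, **self._kwargs)
            except BaseException as e:
                self.exc = e

        def join(self, timeout=None):
            super().join(timeout)
            if self.exc is not None:
                # mirrors the "raise self.exc" frame: the worker's failure surfaces here
                raise self.exc
            return self.ret


    if __name__ == "__main__":
        def boom():
            raise RuntimeError("worker failed")

        t = TestThread(target=boom)
        t.start()
        try:
            t.join()
        except RuntimeError as e:
            print("re-raised in main thread:", e)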
File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return 
self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/shpt/00004d/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0) File "/home/runner/.ya/build/build_root/shpt/00004d/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_tiering.py", line 356, in scenario_many_tables threads.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Caught base exception, num 0 message Graceful shutdown requested contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/shpt/00004d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/shpt/00004d', '--source-root', '/home/runner/.ya/build/build_root/shpt/00004d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/shpt/00004d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/shpt/00004d', '--source-root', '/home/runner/.ya/build/build_root/shpt/00004d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/shpt/00004d/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |98.5%| [TM] {RESULT} ydb/tests/olap/py3test |99.2%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT ydb/tests/olap [size:medium] ------ sole chunk ran 3 tests (total:617.91s - test:577.36s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 10.6G (11154600K) used. 
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
277669 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
277724 33.3M 21.4M 9.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
277755 4.6G 4.6G 4.5G └─ ydb-tests-olap --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
277960 5.7G 5.7G 5.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_sce
282929 73.5M 75.1M 43.7M ├─ ydb -e grpc://localhost:22819 -d /Root workload log --path log run bulk_upsert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 180
282930 128M 129M 98.5M ├─ ydb -e grpc://localhost:22819 -d /Root workload log --path log run insert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 180
282931 131M 133M 101M └─ ydb -e grpc://localhost:22819 -d /Root workload log --path log run upsert --seconds 30 --threads 10 --rows 500 --timestamp_deviation 180
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/stderr
[fail] test_log_scenario.py::TestLogScenario::test[1051200] [default-linux-x86_64-relwithdebinfo] (271.44s)
ydb/tests/olap/test_log_scenario.py:152: in test
    thread.join()
ydb/tests/olap/common/thread_helper.py:18: in join
    raise self.exc
ydb/tests/olap/common/thread_helper.py:11: in run
    self.ret = self._target(*self._args, **self._kwargs)
ydb/tests/olap/test_log_scenario.py:118: in aggregation_query
    self.ydb_client.query(f"SELECT * FROM `{self.table_name}` WHERE timestamp < CurrentUtcTimestamp() - DateTime::IntervalFromHours({hours})")
ydb/tests/olap/common/ydb_client.py:24: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:204: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:202: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/grpcio/py3/grpc/_channel.py:475: in __next__
    return self._next()
contrib/python/grpcio/py3/grpc/_channel.py:881: in _next
    raise self
E grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
E status = StatusCode.UNAVAILABLE
E details = "Socket closed"
E debug_error_string = "UNKNOWN:Error received from peer ipv4:127.0.1.1:22819 {grpc_message:"Socket closed", grpc_status:14, created_time:"2025-05-03T10:09:46.751171143+00:00", file_line:1211, file:"/home/runner/actions_runner/_work/ydb/ydb/contrib/libs/grpc/src/core/lib/surface/call.cc"}"
E >
teardown failed:
ydb/tests/olap/test_log_scenario.py:90: in
teardown_class
    cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:565: in stop
    raise daemon.SeveralDaemonErrors(saved_exceptions)
E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Unexpectedly finished before stop.
E Process exit_code = -11.
E Stdout file name:
E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario.py.TestLogScenario.test.180/cluster/node_1/stdout
E Stderr file name:
E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario.py.TestLogScenario.test.180/cluster/node_1/stderr
E Stderr content:
E
E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff/test_log_scenario.py.TestLogScenario.test.1051200.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/test-results/py3test/testing_out_stuff
------ FAIL: 2 - GOOD, 1 - FAIL ydb/tests/olap
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 18 tests (total:630.16s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
  test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 183.97s
  test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 133.25s
  test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 118.77s
  test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 99.22s
  test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 53.68s
  test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 14.61s
  test_simple.py::TestSimple::test[tablestores] (good) duration: 12.35s
  test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 1.93s
  test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 1.92s
  test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] (good) duration: 1.90s
8 more tests with 4.98s total duration are not listed.
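The test_log_scenario failure above reached grpc through the test's thin YDB client wrapper: every statement goes to the SDK's query session pool, whose execute_with_retries() retries transient statuses (that is the query/pool.py frame in the traceback). It still surfaced StatusCode.UNAVAILABLE / "Socket closed" because the server process itself died (exit_code = -11 in the teardown), which client-side retries cannot hide. A minimal sketch of that calling pattern against the public ydb Python SDK; the endpoint, database and query are placeholders, not what the test harness uses:

    import ydb

    ENDPOINT = "grpc://localhost:2136"  # placeholder; the tests target the cluster the harness starts
    DATABASE = "/Root"


    def main():
        driver = ydb.Driver(endpoint=ENDPOINT, database=DATABASE)
        try:
            driver.wait(timeout=15)
            # QuerySessionPool.execute_with_retries is the pool.py frame from the
            # failure above: it retries transient YDB statuses before giving up.
            pool = ydb.QuerySessionPool(driver)
            result_sets = pool.execute_with_retries("SELECT 1 AS one;")
            print(result_sets[0].rows)
        finally:
            driver.stop()


    if __name__ == "__main__":
        main()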
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-relwithdebinfo] (133.25s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 17 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario
ydb/tests/olap/ttl_tiering [size:medium] nchunks:10
------ [1/10] chunk ran 1 test (total:605.20s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
  data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 604.03s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (604.03s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering
------ sole chunk ran 2 tests (total:211.65s - test:211.60s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 10.1G (10629620K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
277656 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
277753 32.5M 20.9M 8.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
277768 180M 181M 128M └─ ydb-tests-stress-kv-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
277862 1.1G 1.1G 870M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277863 1.1G 1.1G 906M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277864 1.1G 1.1G 883M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277865 1.3G 1.3G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277866 1.1G 1.1G 882M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277867 1.0G 1.0G 853M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277868 1.1G 1.1G 874M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277869 1.0G 1.0G 817M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
277870 1.0G 1.0G 835M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/t
286068 42.1M 0b 0b └─ ydb --verbose --endpoint grpc://localhost:31376 --database=/Root workload kv run mixed --seconds 100 --threads 10 --cols 5 --len 200 --int-cols 2 --key-cols 3 --pa
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kv/tests/test-results/py3test/testing_out_stuff/stderr
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:3.75s - test:3.72s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-relwithdebinfo] (0.87s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:14
Get at /-S/util/generic/ptr.h:595:16
~TStringBuilder at /-S/util/string/builder.h:8:11
NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:640:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 5 suites: 1 - GOOD 2 - FAIL 2 - TIMEOUT
Total 32 tests: 28 - GOOD 2 - FAIL 2 - TIMEOUT
Cache efficiency ratio is 99.79% (35158 of 35233). Local: 15 (0.04%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 35143 (99.74%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
  [605584 ms] [TM] [rnd-8678209458917180601 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 0 (1746266468221), finished: 605584 (1746267073805)]
  [ 44182 ms] [TA] [rnd-shauqlyj7v41u7cc]: $(BUILD_ROOT)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} [started: 605605 (1746267073826), finished: 649787 (1746267118008)]
Time from start: 658538.6770019531 ms, time elapsed by graph 649766 ms, time diff 8772.677001953125 ms.
The longest 10 tasks:
  [649507 ms] [TM] [rnd-gb1wefk8fkmfpfhk default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746266468519, finished: 1746267118026]
  [618191 ms] [TM] [rnd-xos161qyrv0q8aef default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 1746266468541, finished: 1746267086732]
  [605584 ms] [TM] [rnd-8678209458917180601 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468221, finished: 1746267073805]
  [384394 ms] [TM] [rnd-4795624059125515412 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468220, finished: 1746266852614]
  [211974 ms] [TM] [rnd-lgzz2o4sbsxezry5 default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/kv/tests/py3test [started: 1746266468511, finished: 1746266680485]
  [196106 ms] [TM] [rnd-7001779787371391532 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468225, finished: 1746266664331]
  [146618 ms] [TM] [rnd-15315347981278064669 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468254, finished: 1746266614872]
  [134401 ms] [TM] [rnd-10044851859348882276 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468212, finished: 1746266602613]
  [111010 ms] [TM] [rnd-9686285481183424945 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468318, finished: 1746266579328]
  [107436 ms] [TM] [rnd-4826238080509976293 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746266468207, finished: 1746266575643]
Total time by type:
  [3244717 ms] [TM] [count: 73, ave time 44448.18 msec]
  [ 44448 ms] [TA] [count: 2, ave time 22224.00 msec]
  [ 2913 ms] [prepare:bazel-store] [count: 1, ave time 2913.00 msec]
  [ 1858 ms] [prepare:get from local cache] [count: 15, ave time 123.87 msec]
  [ 906 ms] [prepare:tools] [count: 16, ave time 56.62 msec]
  [ 249 ms] [prepare:AC] [count: 2, ave time 124.50 msec]
  [ 124 ms] [prepare:put to dist cache] [count: 9, ave time 13.78 msec]
  [ 64 ms] [prepare:clean] [count: 3, ave time 21.33 msec]
Total tasks times:
  Total failed tasks time - 0 ms (0.00%)
  Total tests tasks time - 3289165 ms (100.00%)
  Total run tasks time - 3289165 ms
Configure time - 14.0 s
Statistics overhead 751 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0